diff --git a/.gitignore b/.gitignore
index 6ad63f7d..57dcaeb9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,7 +5,6 @@ _site
 vendor
 mmp
 .venv
-venv
 .bundle
 src
-assets/js/creations/server/data
\ No newline at end of file
+assets/js/creations/server/data
diff --git a/scripts/handler/__pycache__/utils.cpython-312.pyc b/scripts/handler/__pycache__/utils.cpython-312.pyc
index c6cf2dab..b042a02d 100644
Binary files a/scripts/handler/__pycache__/utils.cpython-312.pyc and b/scripts/handler/__pycache__/utils.cpython-312.pyc differ
diff --git a/scripts/handler/utils.py b/scripts/handler/utils.py
index a0c02514..927b705a 100644
--- a/scripts/handler/utils.py
+++ b/scripts/handler/utils.py
@@ -7,10 +7,13 @@ import os
 #MMP_FOLDER = "/nethome/jotachina/projetos/src_mmpSearch/mmp"
 #MMPZ_FOLDER = "/nethome/jotachina/projetos/src_mmpSearch/mmpz"
 #WAV_FOLDER = "/nethome/jotachina/projetos/src_mmpSearch/wav"
-BASE_PATH = "/var/html/www/trens/mmpSearch"
-MMP_FOLDER = "/var/html/www/trens/mmpSearch/src_mmpSearch/mmp"
-MMPZ_FOLDER = "/var/html/www/trens/mmpSearch/src_mmpSearch/mmpz"
-WAV_FOLDER = "/var/html/www/trens/mmpSearch/src_mmpSearch/wav"
+####
+# Fix the global site paths (user www-data)
+####
+BASE_PATH = "/mmpSearch"
+MMP_FOLDER = "src_mmpSearch/mmp"
+MMPZ_FOLDER = "src_mmpSearch/mmpz"
+WAV_FOLDER = "src_mmpSearch/wav"
 METADATA_FOLDER = os.path.join(BASE_PATH, "metadata")
 DATA_FOLDER = os.path.join(BASE_PATH, "_data")
 CERT_PATH = "/etc/letsencrypt/live/alice.ufsj.edu.br/fullchain.pem"
diff --git a/venv/bin/tqdm b/venv/bin/tqdm
new file mode 100755
index 00000000..ed0d9368
--- /dev/null
+++ b/venv/bin/tqdm
@@ -0,0 +1,8 @@
+#!/nethome/jotachina/projetos/mmpSearch/venv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from tqdm.cli import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
diff --git a/venv/lib/python3.12/site-packages/OpenSSL/SSL.py b/venv/lib/python3.12/site-packages/OpenSSL/SSL.py
new file mode 100644
index 00000000..51c60d55
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/OpenSSL/SSL.py
@@ -0,0 +1,3239 @@
+from __future__ import annotations
+
+import os
+import socket
+import typing
+import warnings
+from collections.abc import Sequence
+from errno import errorcode
+from functools import partial, wraps
+from itertools import chain, count
+from sys import platform
+from typing import Any, Callable, Optional, TypeVar
+from weakref import WeakValueDictionary
+
+from cryptography import x509
+from cryptography.hazmat.primitives.asymmetric import ec
+
+from OpenSSL._util import (
+    StrOrBytesPath as _StrOrBytesPath,
+)
+from OpenSSL._util import (
+    exception_from_error_queue as _exception_from_error_queue,
+)
+from OpenSSL._util import (
+    ffi as _ffi,
+)
+from OpenSSL._util import (
+    lib as _lib,
+)
+from OpenSSL._util import (
+    make_assert as _make_assert,
+)
+from OpenSSL._util import (
+    no_zero_allocator as _no_zero_allocator,
+)
+from OpenSSL._util import (
+    path_bytes as _path_bytes,
+)
+from OpenSSL._util import (
+    text_to_bytes_and_warn as _text_to_bytes_and_warn,
+)
+from OpenSSL.crypto import (
+    FILETYPE_PEM,
+    X509,
+    PKey,
+    X509Name,
+    X509Store,
+    _EllipticCurve,
+    _PassphraseHelper,
+    _PrivateKey,
+)
+
+__all__ = [
+    "DTLS_CLIENT_METHOD",
+    "DTLS_METHOD",
+    "DTLS_SERVER_METHOD",
+    "MODE_RELEASE_BUFFERS",
+    "NO_OVERLAPPING_PROTOCOLS",
+    "OPENSSL_BUILT_ON",
+    "OPENSSL_CFLAGS",
+    "OPENSSL_DIR",
+    "OPENSSL_PLATFORM",
+    "OPENSSL_VERSION",
+    "OPENSSL_VERSION_NUMBER",
+    "OP_ALL",
+
"OP_CIPHER_SERVER_PREFERENCE", + "OP_COOKIE_EXCHANGE", + "OP_DONT_INSERT_EMPTY_FRAGMENTS", + "OP_EPHEMERAL_RSA", + "OP_MICROSOFT_BIG_SSLV3_BUFFER", + "OP_MICROSOFT_SESS_ID_BUG", + "OP_MSIE_SSLV2_RSA_PADDING", + "OP_NETSCAPE_CA_DN_BUG", + "OP_NETSCAPE_CHALLENGE_BUG", + "OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG", + "OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG", + "OP_NO_COMPRESSION", + "OP_NO_QUERY_MTU", + "OP_NO_TICKET", + "OP_PKCS1_CHECK_1", + "OP_PKCS1_CHECK_2", + "OP_SINGLE_DH_USE", + "OP_SINGLE_ECDH_USE", + "OP_SSLEAY_080_CLIENT_DH_BUG", + "OP_SSLREF2_REUSE_CERT_TYPE_BUG", + "OP_TLS_BLOCK_PADDING_BUG", + "OP_TLS_D5_BUG", + "OP_TLS_ROLLBACK_BUG", + "RECEIVED_SHUTDOWN", + "SENT_SHUTDOWN", + "SESS_CACHE_BOTH", + "SESS_CACHE_CLIENT", + "SESS_CACHE_NO_AUTO_CLEAR", + "SESS_CACHE_NO_INTERNAL", + "SESS_CACHE_NO_INTERNAL_LOOKUP", + "SESS_CACHE_NO_INTERNAL_STORE", + "SESS_CACHE_OFF", + "SESS_CACHE_SERVER", + "SSL3_VERSION", + "SSLEAY_BUILT_ON", + "SSLEAY_CFLAGS", + "SSLEAY_DIR", + "SSLEAY_PLATFORM", + "SSLEAY_VERSION", + "SSL_CB_ACCEPT_EXIT", + "SSL_CB_ACCEPT_LOOP", + "SSL_CB_ALERT", + "SSL_CB_CONNECT_EXIT", + "SSL_CB_CONNECT_LOOP", + "SSL_CB_EXIT", + "SSL_CB_HANDSHAKE_DONE", + "SSL_CB_HANDSHAKE_START", + "SSL_CB_LOOP", + "SSL_CB_READ", + "SSL_CB_READ_ALERT", + "SSL_CB_WRITE", + "SSL_CB_WRITE_ALERT", + "SSL_ST_ACCEPT", + "SSL_ST_CONNECT", + "SSL_ST_MASK", + "TLS1_1_VERSION", + "TLS1_2_VERSION", + "TLS1_3_VERSION", + "TLS1_VERSION", + "TLS_CLIENT_METHOD", + "TLS_METHOD", + "TLS_SERVER_METHOD", + "VERIFY_CLIENT_ONCE", + "VERIFY_FAIL_IF_NO_PEER_CERT", + "VERIFY_NONE", + "VERIFY_PEER", + "Connection", + "Context", + "Error", + "OP_NO_SSLv2", + "OP_NO_SSLv3", + "OP_NO_TLSv1", + "OP_NO_TLSv1_1", + "OP_NO_TLSv1_2", + "OP_NO_TLSv1_3", + "SSLeay_version", + "SSLv23_METHOD", + "Session", + "SysCallError", + "TLSv1_1_METHOD", + "TLSv1_2_METHOD", + "TLSv1_METHOD", + "WantReadError", + "WantWriteError", + "WantX509LookupError", + "X509VerificationCodes", + "ZeroReturnError", +] + + +OPENSSL_VERSION_NUMBER: int = _lib.OPENSSL_VERSION_NUMBER +OPENSSL_VERSION: int = _lib.OPENSSL_VERSION +OPENSSL_CFLAGS: int = _lib.OPENSSL_CFLAGS +OPENSSL_PLATFORM: int = _lib.OPENSSL_PLATFORM +OPENSSL_DIR: int = _lib.OPENSSL_DIR +OPENSSL_BUILT_ON: int = _lib.OPENSSL_BUILT_ON + +SSLEAY_VERSION = OPENSSL_VERSION +SSLEAY_CFLAGS = OPENSSL_CFLAGS +SSLEAY_PLATFORM = OPENSSL_PLATFORM +SSLEAY_DIR = OPENSSL_DIR +SSLEAY_BUILT_ON = OPENSSL_BUILT_ON + +SENT_SHUTDOWN = _lib.SSL_SENT_SHUTDOWN +RECEIVED_SHUTDOWN = _lib.SSL_RECEIVED_SHUTDOWN + +SSLv23_METHOD = 3 +TLSv1_METHOD = 4 +TLSv1_1_METHOD = 5 +TLSv1_2_METHOD = 6 +TLS_METHOD = 7 +TLS_SERVER_METHOD = 8 +TLS_CLIENT_METHOD = 9 +DTLS_METHOD = 10 +DTLS_SERVER_METHOD = 11 +DTLS_CLIENT_METHOD = 12 + +SSL3_VERSION: int = _lib.SSL3_VERSION +TLS1_VERSION: int = _lib.TLS1_VERSION +TLS1_1_VERSION: int = _lib.TLS1_1_VERSION +TLS1_2_VERSION: int = _lib.TLS1_2_VERSION +TLS1_3_VERSION: int = _lib.TLS1_3_VERSION + +OP_NO_SSLv2: int = _lib.SSL_OP_NO_SSLv2 +OP_NO_SSLv3: int = _lib.SSL_OP_NO_SSLv3 +OP_NO_TLSv1: int = _lib.SSL_OP_NO_TLSv1 +OP_NO_TLSv1_1: int = _lib.SSL_OP_NO_TLSv1_1 +OP_NO_TLSv1_2: int = _lib.SSL_OP_NO_TLSv1_2 +OP_NO_TLSv1_3: int = _lib.SSL_OP_NO_TLSv1_3 + +MODE_RELEASE_BUFFERS: int = _lib.SSL_MODE_RELEASE_BUFFERS + +OP_SINGLE_DH_USE: int = _lib.SSL_OP_SINGLE_DH_USE +OP_SINGLE_ECDH_USE: int = _lib.SSL_OP_SINGLE_ECDH_USE +OP_EPHEMERAL_RSA: int = _lib.SSL_OP_EPHEMERAL_RSA +OP_MICROSOFT_SESS_ID_BUG: int = _lib.SSL_OP_MICROSOFT_SESS_ID_BUG +OP_NETSCAPE_CHALLENGE_BUG: int = _lib.SSL_OP_NETSCAPE_CHALLENGE_BUG 
+OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG: int = ( + _lib.SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG +) +OP_SSLREF2_REUSE_CERT_TYPE_BUG: int = _lib.SSL_OP_SSLREF2_REUSE_CERT_TYPE_BUG +OP_MICROSOFT_BIG_SSLV3_BUFFER: int = _lib.SSL_OP_MICROSOFT_BIG_SSLV3_BUFFER +OP_MSIE_SSLV2_RSA_PADDING: int = _lib.SSL_OP_MSIE_SSLV2_RSA_PADDING +OP_SSLEAY_080_CLIENT_DH_BUG: int = _lib.SSL_OP_SSLEAY_080_CLIENT_DH_BUG +OP_TLS_D5_BUG: int = _lib.SSL_OP_TLS_D5_BUG +OP_TLS_BLOCK_PADDING_BUG: int = _lib.SSL_OP_TLS_BLOCK_PADDING_BUG +OP_DONT_INSERT_EMPTY_FRAGMENTS: int = _lib.SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS +OP_CIPHER_SERVER_PREFERENCE: int = _lib.SSL_OP_CIPHER_SERVER_PREFERENCE +OP_TLS_ROLLBACK_BUG: int = _lib.SSL_OP_TLS_ROLLBACK_BUG +OP_PKCS1_CHECK_1 = _lib.SSL_OP_PKCS1_CHECK_1 +OP_PKCS1_CHECK_2: int = _lib.SSL_OP_PKCS1_CHECK_2 +OP_NETSCAPE_CA_DN_BUG: int = _lib.SSL_OP_NETSCAPE_CA_DN_BUG +OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG: int = ( + _lib.SSL_OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG +) +OP_NO_COMPRESSION: int = _lib.SSL_OP_NO_COMPRESSION + +OP_NO_QUERY_MTU: int = _lib.SSL_OP_NO_QUERY_MTU +OP_COOKIE_EXCHANGE: int = _lib.SSL_OP_COOKIE_EXCHANGE +OP_NO_TICKET: int = _lib.SSL_OP_NO_TICKET + +try: + OP_NO_RENEGOTIATION: int = _lib.SSL_OP_NO_RENEGOTIATION + __all__.append("OP_NO_RENEGOTIATION") +except AttributeError: + pass + +try: + OP_IGNORE_UNEXPECTED_EOF: int = _lib.SSL_OP_IGNORE_UNEXPECTED_EOF + __all__.append("OP_IGNORE_UNEXPECTED_EOF") +except AttributeError: + pass + +try: + OP_LEGACY_SERVER_CONNECT: int = _lib.SSL_OP_LEGACY_SERVER_CONNECT + __all__.append("OP_LEGACY_SERVER_CONNECT") +except AttributeError: + pass + +OP_ALL: int = _lib.SSL_OP_ALL + +VERIFY_PEER: int = _lib.SSL_VERIFY_PEER +VERIFY_FAIL_IF_NO_PEER_CERT: int = _lib.SSL_VERIFY_FAIL_IF_NO_PEER_CERT +VERIFY_CLIENT_ONCE: int = _lib.SSL_VERIFY_CLIENT_ONCE +VERIFY_NONE: int = _lib.SSL_VERIFY_NONE + +SESS_CACHE_OFF: int = _lib.SSL_SESS_CACHE_OFF +SESS_CACHE_CLIENT: int = _lib.SSL_SESS_CACHE_CLIENT +SESS_CACHE_SERVER: int = _lib.SSL_SESS_CACHE_SERVER +SESS_CACHE_BOTH: int = _lib.SSL_SESS_CACHE_BOTH +SESS_CACHE_NO_AUTO_CLEAR: int = _lib.SSL_SESS_CACHE_NO_AUTO_CLEAR +SESS_CACHE_NO_INTERNAL_LOOKUP: int = _lib.SSL_SESS_CACHE_NO_INTERNAL_LOOKUP +SESS_CACHE_NO_INTERNAL_STORE: int = _lib.SSL_SESS_CACHE_NO_INTERNAL_STORE +SESS_CACHE_NO_INTERNAL: int = _lib.SSL_SESS_CACHE_NO_INTERNAL + +SSL_ST_CONNECT: int = _lib.SSL_ST_CONNECT +SSL_ST_ACCEPT: int = _lib.SSL_ST_ACCEPT +SSL_ST_MASK: int = _lib.SSL_ST_MASK + +SSL_CB_LOOP: int = _lib.SSL_CB_LOOP +SSL_CB_EXIT: int = _lib.SSL_CB_EXIT +SSL_CB_READ: int = _lib.SSL_CB_READ +SSL_CB_WRITE: int = _lib.SSL_CB_WRITE +SSL_CB_ALERT: int = _lib.SSL_CB_ALERT +SSL_CB_READ_ALERT: int = _lib.SSL_CB_READ_ALERT +SSL_CB_WRITE_ALERT: int = _lib.SSL_CB_WRITE_ALERT +SSL_CB_ACCEPT_LOOP: int = _lib.SSL_CB_ACCEPT_LOOP +SSL_CB_ACCEPT_EXIT: int = _lib.SSL_CB_ACCEPT_EXIT +SSL_CB_CONNECT_LOOP: int = _lib.SSL_CB_CONNECT_LOOP +SSL_CB_CONNECT_EXIT: int = _lib.SSL_CB_CONNECT_EXIT +SSL_CB_HANDSHAKE_START: int = _lib.SSL_CB_HANDSHAKE_START +SSL_CB_HANDSHAKE_DONE: int = _lib.SSL_CB_HANDSHAKE_DONE + +_Buffer = typing.Union[bytes, bytearray, memoryview] +_T = TypeVar("_T") + + +class _NoOverlappingProtocols: + pass + + +NO_OVERLAPPING_PROTOCOLS = _NoOverlappingProtocols() + +# Callback types. 
+_ALPNSelectCallback = Callable[ + [ + "Connection", + typing.List[bytes], + ], + typing.Union[bytes, _NoOverlappingProtocols], +] +_CookieGenerateCallback = Callable[["Connection"], bytes] +_CookieVerifyCallback = Callable[["Connection", bytes], bool] +_OCSPClientCallback = Callable[["Connection", bytes, Optional[_T]], bool] +_OCSPServerCallback = Callable[["Connection", Optional[_T]], bytes] +_PassphraseCallback = Callable[[int, bool, Optional[_T]], bytes] +_VerifyCallback = Callable[["Connection", X509, int, int, int], bool] + + +class X509VerificationCodes: + """ + Success and error codes for X509 verification, as returned by the + underlying ``X509_STORE_CTX_get_error()`` function and passed by pyOpenSSL + to verification callback functions. + + See `OpenSSL Verification Errors + `_ + for details. + """ + + OK = _lib.X509_V_OK + ERR_UNABLE_TO_GET_ISSUER_CERT = _lib.X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT + ERR_UNABLE_TO_GET_CRL = _lib.X509_V_ERR_UNABLE_TO_GET_CRL + ERR_UNABLE_TO_DECRYPT_CERT_SIGNATURE = ( + _lib.X509_V_ERR_UNABLE_TO_DECRYPT_CERT_SIGNATURE + ) + ERR_UNABLE_TO_DECRYPT_CRL_SIGNATURE = ( + _lib.X509_V_ERR_UNABLE_TO_DECRYPT_CRL_SIGNATURE + ) + ERR_UNABLE_TO_DECODE_ISSUER_PUBLIC_KEY = ( + _lib.X509_V_ERR_UNABLE_TO_DECODE_ISSUER_PUBLIC_KEY + ) + ERR_CERT_SIGNATURE_FAILURE = _lib.X509_V_ERR_CERT_SIGNATURE_FAILURE + ERR_CRL_SIGNATURE_FAILURE = _lib.X509_V_ERR_CRL_SIGNATURE_FAILURE + ERR_CERT_NOT_YET_VALID = _lib.X509_V_ERR_CERT_NOT_YET_VALID + ERR_CERT_HAS_EXPIRED = _lib.X509_V_ERR_CERT_HAS_EXPIRED + ERR_CRL_NOT_YET_VALID = _lib.X509_V_ERR_CRL_NOT_YET_VALID + ERR_CRL_HAS_EXPIRED = _lib.X509_V_ERR_CRL_HAS_EXPIRED + ERR_ERROR_IN_CERT_NOT_BEFORE_FIELD = ( + _lib.X509_V_ERR_ERROR_IN_CERT_NOT_BEFORE_FIELD + ) + ERR_ERROR_IN_CERT_NOT_AFTER_FIELD = ( + _lib.X509_V_ERR_ERROR_IN_CERT_NOT_AFTER_FIELD + ) + ERR_ERROR_IN_CRL_LAST_UPDATE_FIELD = ( + _lib.X509_V_ERR_ERROR_IN_CRL_LAST_UPDATE_FIELD + ) + ERR_ERROR_IN_CRL_NEXT_UPDATE_FIELD = ( + _lib.X509_V_ERR_ERROR_IN_CRL_NEXT_UPDATE_FIELD + ) + ERR_OUT_OF_MEM = _lib.X509_V_ERR_OUT_OF_MEM + ERR_DEPTH_ZERO_SELF_SIGNED_CERT = ( + _lib.X509_V_ERR_DEPTH_ZERO_SELF_SIGNED_CERT + ) + ERR_SELF_SIGNED_CERT_IN_CHAIN = _lib.X509_V_ERR_SELF_SIGNED_CERT_IN_CHAIN + ERR_UNABLE_TO_GET_ISSUER_CERT_LOCALLY = ( + _lib.X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT_LOCALLY + ) + ERR_UNABLE_TO_VERIFY_LEAF_SIGNATURE = ( + _lib.X509_V_ERR_UNABLE_TO_VERIFY_LEAF_SIGNATURE + ) + ERR_CERT_CHAIN_TOO_LONG = _lib.X509_V_ERR_CERT_CHAIN_TOO_LONG + ERR_CERT_REVOKED = _lib.X509_V_ERR_CERT_REVOKED + ERR_INVALID_CA = _lib.X509_V_ERR_INVALID_CA + ERR_PATH_LENGTH_EXCEEDED = _lib.X509_V_ERR_PATH_LENGTH_EXCEEDED + ERR_INVALID_PURPOSE = _lib.X509_V_ERR_INVALID_PURPOSE + ERR_CERT_UNTRUSTED = _lib.X509_V_ERR_CERT_UNTRUSTED + ERR_CERT_REJECTED = _lib.X509_V_ERR_CERT_REJECTED + ERR_SUBJECT_ISSUER_MISMATCH = _lib.X509_V_ERR_SUBJECT_ISSUER_MISMATCH + ERR_AKID_SKID_MISMATCH = _lib.X509_V_ERR_AKID_SKID_MISMATCH + ERR_AKID_ISSUER_SERIAL_MISMATCH = ( + _lib.X509_V_ERR_AKID_ISSUER_SERIAL_MISMATCH + ) + ERR_KEYUSAGE_NO_CERTSIGN = _lib.X509_V_ERR_KEYUSAGE_NO_CERTSIGN + ERR_UNABLE_TO_GET_CRL_ISSUER = _lib.X509_V_ERR_UNABLE_TO_GET_CRL_ISSUER + ERR_UNHANDLED_CRITICAL_EXTENSION = ( + _lib.X509_V_ERR_UNHANDLED_CRITICAL_EXTENSION + ) + ERR_KEYUSAGE_NO_CRL_SIGN = _lib.X509_V_ERR_KEYUSAGE_NO_CRL_SIGN + ERR_UNHANDLED_CRITICAL_CRL_EXTENSION = ( + _lib.X509_V_ERR_UNHANDLED_CRITICAL_CRL_EXTENSION + ) + ERR_INVALID_NON_CA = _lib.X509_V_ERR_INVALID_NON_CA + ERR_PROXY_PATH_LENGTH_EXCEEDED = 
_lib.X509_V_ERR_PROXY_PATH_LENGTH_EXCEEDED
+    ERR_KEYUSAGE_NO_DIGITAL_SIGNATURE = (
+        _lib.X509_V_ERR_KEYUSAGE_NO_DIGITAL_SIGNATURE
+    )
+    ERR_PROXY_CERTIFICATES_NOT_ALLOWED = (
+        _lib.X509_V_ERR_PROXY_CERTIFICATES_NOT_ALLOWED
+    )
+    ERR_INVALID_EXTENSION = _lib.X509_V_ERR_INVALID_EXTENSION
+    ERR_INVALID_POLICY_EXTENSION = _lib.X509_V_ERR_INVALID_POLICY_EXTENSION
+    ERR_NO_EXPLICIT_POLICY = _lib.X509_V_ERR_NO_EXPLICIT_POLICY
+    ERR_DIFFERENT_CRL_SCOPE = _lib.X509_V_ERR_DIFFERENT_CRL_SCOPE
+    ERR_UNSUPPORTED_EXTENSION_FEATURE = (
+        _lib.X509_V_ERR_UNSUPPORTED_EXTENSION_FEATURE
+    )
+    ERR_UNNESTED_RESOURCE = _lib.X509_V_ERR_UNNESTED_RESOURCE
+    ERR_PERMITTED_VIOLATION = _lib.X509_V_ERR_PERMITTED_VIOLATION
+    ERR_EXCLUDED_VIOLATION = _lib.X509_V_ERR_EXCLUDED_VIOLATION
+    ERR_SUBTREE_MINMAX = _lib.X509_V_ERR_SUBTREE_MINMAX
+    ERR_UNSUPPORTED_CONSTRAINT_TYPE = (
+        _lib.X509_V_ERR_UNSUPPORTED_CONSTRAINT_TYPE
+    )
+    ERR_UNSUPPORTED_CONSTRAINT_SYNTAX = (
+        _lib.X509_V_ERR_UNSUPPORTED_CONSTRAINT_SYNTAX
+    )
+    ERR_UNSUPPORTED_NAME_SYNTAX = _lib.X509_V_ERR_UNSUPPORTED_NAME_SYNTAX
+    ERR_CRL_PATH_VALIDATION_ERROR = _lib.X509_V_ERR_CRL_PATH_VALIDATION_ERROR
+    ERR_HOSTNAME_MISMATCH = _lib.X509_V_ERR_HOSTNAME_MISMATCH
+    ERR_EMAIL_MISMATCH = _lib.X509_V_ERR_EMAIL_MISMATCH
+    ERR_IP_ADDRESS_MISMATCH = _lib.X509_V_ERR_IP_ADDRESS_MISMATCH
+    ERR_APPLICATION_VERIFICATION = _lib.X509_V_ERR_APPLICATION_VERIFICATION
+
+
+# Taken from https://golang.org/src/crypto/x509/root_linux.go
+_CERTIFICATE_FILE_LOCATIONS = [
+    "/etc/ssl/certs/ca-certificates.crt",  # Debian/Ubuntu/Gentoo etc.
+    "/etc/pki/tls/certs/ca-bundle.crt",  # Fedora/RHEL 6
+    "/etc/ssl/ca-bundle.pem",  # OpenSUSE
+    "/etc/pki/tls/cacert.pem",  # OpenELEC
+    "/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem",  # CentOS/RHEL 7
+]
+
+_CERTIFICATE_PATH_LOCATIONS = [
+    "/etc/ssl/certs",  # SLES10/SLES11
+]
+
+# These values are compared to output from cffi's ffi.string so they must be
+# byte strings.
+_CRYPTOGRAPHY_MANYLINUX_CA_DIR = b"/opt/pyca/cryptography/openssl/certs"
+_CRYPTOGRAPHY_MANYLINUX_CA_FILE = b"/opt/pyca/cryptography/openssl/cert.pem"
+
+
+class Error(Exception):
+    """
+    An error occurred in an `OpenSSL.SSL` API.
+    """
+
+
+_raise_current_error = partial(_exception_from_error_queue, Error)
+_openssl_assert = _make_assert(Error)
+
+
+class WantReadError(Error):
+    pass
+
+
+class WantWriteError(Error):
+    pass
+
+
+class WantX509LookupError(Error):
+    pass
+
+
+class ZeroReturnError(Error):
+    pass
+
+
+class SysCallError(Error):
+    pass
+
+
+class _CallbackExceptionHelper:
+    """
+    A base class for wrapper classes that allow for intelligent exception
+    handling in OpenSSL callbacks.
+
+    :ivar list _problems: Any exceptions that occurred while executing in a
+        context where they could not be raised in the normal way. Typically
+        this is because OpenSSL has called into some Python code and requires a
+        return value. The exceptions are saved to be raised later when it is
+        possible to do so.
+    """
+
+    def __init__(self) -> None:
+        self._problems: list[Exception] = []
+
+    def raise_if_problem(self) -> None:
+        """
+        Raise an exception from the OpenSSL error queue or that was previously
+        captured when running a callback.
+        """
+        if self._problems:
+            try:
+                _raise_current_error()
+            except Error:
+                pass
+            raise self._problems.pop(0)
+
+
+class _VerifyHelper(_CallbackExceptionHelper):
+    """
+    Wrap a callback such that it can be used as a certificate verification
+    callback.
+ """ + + def __init__(self, callback: _VerifyCallback) -> None: + _CallbackExceptionHelper.__init__(self) + + @wraps(callback) + def wrapper(ok, store_ctx): # type: ignore[no-untyped-def] + x509 = _lib.X509_STORE_CTX_get_current_cert(store_ctx) + _lib.X509_up_ref(x509) + cert = X509._from_raw_x509_ptr(x509) + error_number = _lib.X509_STORE_CTX_get_error(store_ctx) + error_depth = _lib.X509_STORE_CTX_get_error_depth(store_ctx) + + index = _lib.SSL_get_ex_data_X509_STORE_CTX_idx() + ssl = _lib.X509_STORE_CTX_get_ex_data(store_ctx, index) + connection = Connection._reverse_mapping[ssl] + + try: + result = callback( + connection, cert, error_number, error_depth, ok + ) + except Exception as e: + self._problems.append(e) + return 0 + else: + if result: + _lib.X509_STORE_CTX_set_error(store_ctx, _lib.X509_V_OK) + return 1 + else: + return 0 + + self.callback = _ffi.callback( + "int (*)(int, X509_STORE_CTX *)", wrapper + ) + + +class _ALPNSelectHelper(_CallbackExceptionHelper): + """ + Wrap a callback such that it can be used as an ALPN selection callback. + """ + + def __init__(self, callback: _ALPNSelectCallback) -> None: + _CallbackExceptionHelper.__init__(self) + + @wraps(callback) + def wrapper(ssl, out, outlen, in_, inlen, arg): # type: ignore[no-untyped-def] + try: + conn = Connection._reverse_mapping[ssl] + + # The string passed to us is made up of multiple + # length-prefixed bytestrings. We need to split that into a + # list. + instr = _ffi.buffer(in_, inlen)[:] + protolist = [] + while instr: + encoded_len = instr[0] + proto = instr[1 : encoded_len + 1] + protolist.append(proto) + instr = instr[encoded_len + 1 :] + + # Call the callback + outbytes = callback(conn, protolist) + any_accepted = True + if outbytes is NO_OVERLAPPING_PROTOCOLS: + outbytes = b"" + any_accepted = False + elif not isinstance(outbytes, bytes): + raise TypeError( + "ALPN callback must return a bytestring or the " + "special NO_OVERLAPPING_PROTOCOLS sentinel value." + ) + + # Save our callback arguments on the connection object to make + # sure that they don't get freed before OpenSSL can use them. + # Then, return them in the appropriate output parameters. + conn._alpn_select_callback_args = [ + _ffi.new("unsigned char *", len(outbytes)), + _ffi.new("unsigned char[]", outbytes), + ] + outlen[0] = conn._alpn_select_callback_args[0][0] + out[0] = conn._alpn_select_callback_args[1] + if not any_accepted: + return _lib.SSL_TLSEXT_ERR_NOACK + return _lib.SSL_TLSEXT_ERR_OK + except Exception as e: + self._problems.append(e) + return _lib.SSL_TLSEXT_ERR_ALERT_FATAL + + self.callback = _ffi.callback( + ( + "int (*)(SSL *, unsigned char **, unsigned char *, " + "const unsigned char *, unsigned int, void *)" + ), + wrapper, + ) + + +class _OCSPServerCallbackHelper(_CallbackExceptionHelper): + """ + Wrap a callback such that it can be used as an OCSP callback for the server + side. + + Annoyingly, OpenSSL defines one OCSP callback but uses it in two different + ways. For servers, that callback is expected to retrieve some OCSP data and + hand it to OpenSSL, and may return only SSL_TLSEXT_ERR_OK, + SSL_TLSEXT_ERR_FATAL, and SSL_TLSEXT_ERR_NOACK. For clients, that callback + is expected to check the OCSP data, and returns a negative value on error, + 0 if the response is not acceptable, or positive if it is. These are + mutually exclusive return code behaviours, and they mean that we need two + helpers so that we always return an appropriate error code if the user's + code throws an exception. 
+ + Given that we have to have two helpers anyway, these helpers are a bit more + helpery than most: specifically, they hide a few more of the OpenSSL + functions so that the user has an easier time writing these callbacks. + + This helper implements the server side. + """ + + def __init__(self, callback: _OCSPServerCallback[Any]) -> None: + _CallbackExceptionHelper.__init__(self) + + @wraps(callback) + def wrapper(ssl, cdata): # type: ignore[no-untyped-def] + try: + conn = Connection._reverse_mapping[ssl] + + # Extract the data if any was provided. + if cdata != _ffi.NULL: + data = _ffi.from_handle(cdata) + else: + data = None + + # Call the callback. + ocsp_data = callback(conn, data) + + if not isinstance(ocsp_data, bytes): + raise TypeError("OCSP callback must return a bytestring.") + + # If the OCSP data was provided, we will pass it to OpenSSL. + # However, we have an early exit here: if no OCSP data was + # provided we will just exit out and tell OpenSSL that there + # is nothing to do. + if not ocsp_data: + return 3 # SSL_TLSEXT_ERR_NOACK + + # OpenSSL takes ownership of this data and expects it to have + # been allocated by OPENSSL_malloc. + ocsp_data_length = len(ocsp_data) + data_ptr = _lib.OPENSSL_malloc(ocsp_data_length) + _ffi.buffer(data_ptr, ocsp_data_length)[:] = ocsp_data + + _lib.SSL_set_tlsext_status_ocsp_resp( + ssl, data_ptr, ocsp_data_length + ) + + return 0 + except Exception as e: + self._problems.append(e) + return 2 # SSL_TLSEXT_ERR_ALERT_FATAL + + self.callback = _ffi.callback("int (*)(SSL *, void *)", wrapper) + + +class _OCSPClientCallbackHelper(_CallbackExceptionHelper): + """ + Wrap a callback such that it can be used as an OCSP callback for the client + side. + + Annoyingly, OpenSSL defines one OCSP callback but uses it in two different + ways. For servers, that callback is expected to retrieve some OCSP data and + hand it to OpenSSL, and may return only SSL_TLSEXT_ERR_OK, + SSL_TLSEXT_ERR_FATAL, and SSL_TLSEXT_ERR_NOACK. For clients, that callback + is expected to check the OCSP data, and returns a negative value on error, + 0 if the response is not acceptable, or positive if it is. These are + mutually exclusive return code behaviours, and they mean that we need two + helpers so that we always return an appropriate error code if the user's + code throws an exception. + + Given that we have to have two helpers anyway, these helpers are a bit more + helpery than most: specifically, they hide a few more of the OpenSSL + functions so that the user has an easier time writing these callbacks. + + This helper implements the client side. + """ + + def __init__(self, callback: _OCSPClientCallback[Any]) -> None: + _CallbackExceptionHelper.__init__(self) + + @wraps(callback) + def wrapper(ssl, cdata): # type: ignore[no-untyped-def] + try: + conn = Connection._reverse_mapping[ssl] + + # Extract the data if any was provided. + if cdata != _ffi.NULL: + data = _ffi.from_handle(cdata) + else: + data = None + + # Get the OCSP data. + ocsp_ptr = _ffi.new("unsigned char **") + ocsp_len = _lib.SSL_get_tlsext_status_ocsp_resp(ssl, ocsp_ptr) + if ocsp_len < 0: + # No OCSP data. + ocsp_data = b"" + else: + # Copy the OCSP data, then pass it to the callback. + ocsp_data = _ffi.buffer(ocsp_ptr[0], ocsp_len)[:] + + valid = callback(conn, ocsp_data, data) + + # Return 1 on success or 0 on error. + return int(bool(valid)) + + except Exception as e: + self._problems.append(e) + # Return negative value if an exception is hit. 
+                return -1
+
+        self.callback = _ffi.callback("int (*)(SSL *, void *)", wrapper)
+
+
+class _CookieGenerateCallbackHelper(_CallbackExceptionHelper):
+    def __init__(self, callback: _CookieGenerateCallback) -> None:
+        _CallbackExceptionHelper.__init__(self)
+
+        @wraps(callback)
+        def wrapper(ssl, out, outlen):  # type: ignore[no-untyped-def]
+            try:
+                conn = Connection._reverse_mapping[ssl]
+                cookie = callback(conn)
+                out[0 : len(cookie)] = cookie
+                outlen[0] = len(cookie)
+                return 1
+            except Exception as e:
+                self._problems.append(e)
+                # "a zero return value can be used to abort the handshake"
+                return 0
+
+        self.callback = _ffi.callback(
+            "int (*)(SSL *, unsigned char *, unsigned int *)",
+            wrapper,
+        )
+
+
+class _CookieVerifyCallbackHelper(_CallbackExceptionHelper):
+    def __init__(self, callback: _CookieVerifyCallback) -> None:
+        _CallbackExceptionHelper.__init__(self)
+
+        @wraps(callback)
+        def wrapper(ssl, c_cookie, cookie_len):  # type: ignore[no-untyped-def]
+            try:
+                conn = Connection._reverse_mapping[ssl]
+                return callback(conn, bytes(c_cookie[0:cookie_len]))
+            except Exception as e:
+                self._problems.append(e)
+                return 0
+
+        self.callback = _ffi.callback(
+            "int (*)(SSL *, unsigned char *, unsigned int)",
+            wrapper,
+        )
+
+
+def _asFileDescriptor(obj: Any) -> int:
+    fd = None
+    if not isinstance(obj, int):
+        meth = getattr(obj, "fileno", None)
+        if meth is not None:
+            obj = meth()
+
+    if isinstance(obj, int):
+        fd = obj
+
+    if not isinstance(fd, int):
+        raise TypeError("argument must be an int, or have a fileno() method.")
+    elif fd < 0:
+        raise ValueError(
+            f"file descriptor cannot be a negative integer ({fd})"
+        )
+
+    return fd
+
+
+def OpenSSL_version(type: int) -> bytes:
+    """
+    Return a string describing the version of OpenSSL in use.
+
+    :param type: One of the :const:`OPENSSL_` constants defined in this module.
+    """
+    return _ffi.string(_lib.OpenSSL_version(type))
+
+
+SSLeay_version = OpenSSL_version
+
+
+def _make_requires(flag: int, error: str) -> Callable[[_T], _T]:
+    """
+    Builds a decorator that ensures that functions that rely on OpenSSL
+    functions that are not present in this build raise NotImplementedError,
+    rather than AttributeError coming out of cryptography.
+
+    :param flag: A cryptography flag that guards the functions, e.g.
+        ``Cryptography_HAS_NEXTPROTONEG``.
+    :param error: The string to be used in the exception if the flag is false.
+    """
+
+    def _requires_decorator(func):  # type: ignore[no-untyped-def]
+        if not flag:
+
+            @wraps(func)
+            def explode(*args, **kwargs):  # type: ignore[no-untyped-def]
+                raise NotImplementedError(error)
+
+            return explode
+        else:
+            return func
+
+    return _requires_decorator
+
+
+_requires_keylog = _make_requires(
+    getattr(_lib, "Cryptography_HAS_KEYLOG", 0), "Key logging not available"
+)
+
+
+class Session:
+    """
+    A class representing an SSL session. A session defines certain connection
+    parameters which may be re-used to speed up the setup of subsequent
+    connections.
+
+    .. versionadded:: 0.14
+    """
+
+    _session: Any
+
+
+F = TypeVar("F", bound=Callable[..., Any])
+
+
+def _require_not_used(f: F) -> F:
+    @wraps(f)
+    def inner(self: Context, *args: Any, **kwargs: Any) -> Any:
+        if self._used:
+            warnings.warn(
+                (
+                    "Attempting to mutate a Context after a Connection was "
+                    "created. 
In the future, this will raise an exception" + ), + DeprecationWarning, + stacklevel=2, + ) + return f(self, *args, **kwargs) + + return typing.cast(F, inner) + + +class Context: + """ + :class:`OpenSSL.SSL.Context` instances define the parameters for setting + up new SSL connections. + + :param method: One of TLS_METHOD, TLS_CLIENT_METHOD, TLS_SERVER_METHOD, + DTLS_METHOD, DTLS_CLIENT_METHOD, or DTLS_SERVER_METHOD. + SSLv23_METHOD, TLSv1_METHOD, etc. are deprecated and should + not be used. + """ + + _methods: typing.ClassVar[ + dict[int, tuple[Callable[[], Any], int | None]] + ] = { + SSLv23_METHOD: (_lib.TLS_method, None), + TLSv1_METHOD: (_lib.TLS_method, TLS1_VERSION), + TLSv1_1_METHOD: (_lib.TLS_method, TLS1_1_VERSION), + TLSv1_2_METHOD: (_lib.TLS_method, TLS1_2_VERSION), + TLS_METHOD: (_lib.TLS_method, None), + TLS_SERVER_METHOD: (_lib.TLS_server_method, None), + TLS_CLIENT_METHOD: (_lib.TLS_client_method, None), + DTLS_METHOD: (_lib.DTLS_method, None), + DTLS_SERVER_METHOD: (_lib.DTLS_server_method, None), + DTLS_CLIENT_METHOD: (_lib.DTLS_client_method, None), + } + + def __init__(self, method: int) -> None: + if not isinstance(method, int): + raise TypeError("method must be an integer") + + try: + method_func, version = self._methods[method] + except KeyError: + raise ValueError("No such protocol") + + method_obj = method_func() + _openssl_assert(method_obj != _ffi.NULL) + + context = _lib.SSL_CTX_new(method_obj) + _openssl_assert(context != _ffi.NULL) + context = _ffi.gc(context, _lib.SSL_CTX_free) + + self._context = context + self._used = False + self._passphrase_helper: _PassphraseHelper | None = None + self._passphrase_callback: _PassphraseCallback[Any] | None = None + self._passphrase_userdata: Any | None = None + self._verify_helper: _VerifyHelper | None = None + self._verify_callback: _VerifyCallback | None = None + self._info_callback = None + self._keylog_callback = None + self._tlsext_servername_callback = None + self._app_data = None + self._alpn_select_helper: _ALPNSelectHelper | None = None + self._alpn_select_callback: _ALPNSelectCallback | None = None + self._ocsp_helper: ( + _OCSPClientCallbackHelper | _OCSPServerCallbackHelper | None + ) = None + self._ocsp_callback: ( + _OCSPClientCallback[Any] | _OCSPServerCallback[Any] | None + ) = None + self._ocsp_data: Any | None = None + self._cookie_generate_helper: _CookieGenerateCallbackHelper | None = ( + None + ) + self._cookie_verify_helper: _CookieVerifyCallbackHelper | None = None + + self.set_mode( + _lib.SSL_MODE_ENABLE_PARTIAL_WRITE + | _lib.SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER + ) + if version is not None: + self.set_min_proto_version(version) + self.set_max_proto_version(version) + + @_require_not_used + def set_min_proto_version(self, version: int) -> None: + """ + Set the minimum supported protocol version. Setting the minimum + version to 0 will enable protocol versions down to the lowest version + supported by the library. + + If the underlying OpenSSL build is missing support for the selected + version, this method will raise an exception. + """ + _openssl_assert( + _lib.SSL_CTX_set_min_proto_version(self._context, version) == 1 + ) + + @_require_not_used + def set_max_proto_version(self, version: int) -> None: + """ + Set the maximum supported protocol version. Setting the maximum + version to 0 will enable protocol versions up to the highest version + supported by the library. + + If the underlying OpenSSL build is missing support for the selected + version, this method will raise an exception. 
+ """ + _openssl_assert( + _lib.SSL_CTX_set_max_proto_version(self._context, version) == 1 + ) + + @_require_not_used + def load_verify_locations( + self, + cafile: _StrOrBytesPath | None, + capath: _StrOrBytesPath | None = None, + ) -> None: + """ + Let SSL know where we can find trusted certificates for the certificate + chain. Note that the certificates have to be in PEM format. + + If capath is passed, it must be a directory prepared using the + ``c_rehash`` tool included with OpenSSL. Either, but not both, of + *pemfile* or *capath* may be :data:`None`. + + :param cafile: In which file we can find the certificates (``bytes`` or + ``str``). + :param capath: In which directory we can find the certificates + (``bytes`` or ``str``). + + :return: None + """ + if cafile is None: + cafile = _ffi.NULL + else: + cafile = _path_bytes(cafile) + + if capath is None: + capath = _ffi.NULL + else: + capath = _path_bytes(capath) + + load_result = _lib.SSL_CTX_load_verify_locations( + self._context, cafile, capath + ) + if not load_result: + _raise_current_error() + + def _wrap_callback( + self, callback: _PassphraseCallback[_T] + ) -> _PassphraseHelper: + @wraps(callback) + def wrapper(size: int, verify: bool, userdata: Any) -> bytes: + return callback(size, verify, self._passphrase_userdata) + + return _PassphraseHelper( + FILETYPE_PEM, wrapper, more_args=True, truncate=True + ) + + @_require_not_used + def set_passwd_cb( + self, + callback: _PassphraseCallback[_T], + userdata: _T | None = None, + ) -> None: + """ + Set the passphrase callback. This function will be called + when a private key with a passphrase is loaded. + + :param callback: The Python callback to use. This must accept three + positional arguments. First, an integer giving the maximum length + of the passphrase it may return. If the returned passphrase is + longer than this, it will be truncated. Second, a boolean value + which will be true if the user should be prompted for the + passphrase twice and the callback should verify that the two values + supplied are equal. Third, the value given as the *userdata* + parameter to :meth:`set_passwd_cb`. The *callback* must return + a byte string. If an error occurs, *callback* should return a false + value (e.g. an empty string). + :param userdata: (optional) A Python object which will be given as + argument to the callback + :return: None + """ + if not callable(callback): + raise TypeError("callback must be callable") + + self._passphrase_helper = self._wrap_callback(callback) + self._passphrase_callback = self._passphrase_helper.callback + _lib.SSL_CTX_set_default_passwd_cb( + self._context, self._passphrase_callback + ) + self._passphrase_userdata = userdata + + @_require_not_used + def set_default_verify_paths(self) -> None: + """ + Specify that the platform provided CA certificates are to be used for + verification purposes. This method has some caveats related to the + binary wheels that cryptography (pyOpenSSL's primary dependency) ships: + + * macOS will only load certificates using this method if the user has + the ``openssl@1.1`` `Homebrew `_ formula installed + in the default location. + * Windows will not work. + * manylinux cryptography wheels will work on most common Linux + distributions in pyOpenSSL 17.1.0 and above. pyOpenSSL detects the + manylinux wheel and attempts to load roots via a fallback path. + + :return: None + """ + # SSL_CTX_set_default_verify_paths will attempt to load certs from + # both a cafile and capath that are set at compile time. 
However, + # it will first check environment variables and, if present, load + # those paths instead + set_result = _lib.SSL_CTX_set_default_verify_paths(self._context) + _openssl_assert(set_result == 1) + # After attempting to set default_verify_paths we need to know whether + # to go down the fallback path. + # First we'll check to see if any env vars have been set. If so, + # we won't try to do anything else because the user has set the path + # themselves. + if not self._check_env_vars_set("SSL_CERT_DIR", "SSL_CERT_FILE"): + default_dir = _ffi.string(_lib.X509_get_default_cert_dir()) + default_file = _ffi.string(_lib.X509_get_default_cert_file()) + # Now we check to see if the default_dir and default_file are set + # to the exact values we use in our manylinux builds. If they are + # then we know to load the fallbacks + if ( + default_dir == _CRYPTOGRAPHY_MANYLINUX_CA_DIR + and default_file == _CRYPTOGRAPHY_MANYLINUX_CA_FILE + ): + # This is manylinux, let's load our fallback paths + self._fallback_default_verify_paths( + _CERTIFICATE_FILE_LOCATIONS, _CERTIFICATE_PATH_LOCATIONS + ) + + def _check_env_vars_set(self, dir_env_var: str, file_env_var: str) -> bool: + """ + Check to see if the default cert dir/file environment vars are present. + + :return: bool + """ + return ( + os.environ.get(file_env_var) is not None + or os.environ.get(dir_env_var) is not None + ) + + def _fallback_default_verify_paths( + self, file_path: list[str], dir_path: list[str] + ) -> None: + """ + Default verify paths are based on the compiled version of OpenSSL. + However, when pyca/cryptography is compiled as a manylinux wheel + that compiled location can potentially be wrong. So, like Go, we + will try a predefined set of paths and attempt to load roots + from there. + + :return: None + """ + for cafile in file_path: + if os.path.isfile(cafile): + self.load_verify_locations(cafile) + break + + for capath in dir_path: + if os.path.isdir(capath): + self.load_verify_locations(None, capath) + break + + @_require_not_used + def use_certificate_chain_file(self, certfile: _StrOrBytesPath) -> None: + """ + Load a certificate chain from a file. + + :param certfile: The name of the certificate chain file (``bytes`` or + ``str``). Must be PEM encoded. + + :return: None + """ + certfile = _path_bytes(certfile) + + result = _lib.SSL_CTX_use_certificate_chain_file( + self._context, certfile + ) + if not result: + _raise_current_error() + + @_require_not_used + def use_certificate_file( + self, certfile: _StrOrBytesPath, filetype: int = FILETYPE_PEM + ) -> None: + """ + Load a certificate from a file + + :param certfile: The name of the certificate file (``bytes`` or + ``str``). + :param filetype: (optional) The encoding of the file, which is either + :const:`FILETYPE_PEM` or :const:`FILETYPE_ASN1`. The default is + :const:`FILETYPE_PEM`. + + :return: None + """ + certfile = _path_bytes(certfile) + if not isinstance(filetype, int): + raise TypeError("filetype must be an integer") + + use_result = _lib.SSL_CTX_use_certificate_file( + self._context, certfile, filetype + ) + if not use_result: + _raise_current_error() + + @_require_not_used + def use_certificate(self, cert: X509 | x509.Certificate) -> None: + """ + Load a certificate from a X509 object + + :param cert: The X509 object + :return: None + """ + # Mirrored at Connection.use_certificate + if not isinstance(cert, X509): + cert = X509.from_cryptography(cert) + else: + warnings.warn( + ( + "Passing pyOpenSSL X509 objects is deprecated. 
You " + "should use a cryptography.x509.Certificate instead." + ), + DeprecationWarning, + stacklevel=2, + ) + + use_result = _lib.SSL_CTX_use_certificate(self._context, cert._x509) + if not use_result: + _raise_current_error() + + @_require_not_used + def add_extra_chain_cert(self, certobj: X509 | x509.Certificate) -> None: + """ + Add certificate to chain + + :param certobj: The X509 certificate object to add to the chain + :return: None + """ + if not isinstance(certobj, X509): + certobj = X509.from_cryptography(certobj) + else: + warnings.warn( + ( + "Passing pyOpenSSL X509 objects is deprecated. You " + "should use a cryptography.x509.Certificate instead." + ), + DeprecationWarning, + stacklevel=2, + ) + + copy = _lib.X509_dup(certobj._x509) + add_result = _lib.SSL_CTX_add_extra_chain_cert(self._context, copy) + if not add_result: + # TODO: This is untested. + _lib.X509_free(copy) + _raise_current_error() + + def _raise_passphrase_exception(self) -> None: + if self._passphrase_helper is not None: + self._passphrase_helper.raise_if_problem(Error) + + _raise_current_error() + + @_require_not_used + def use_privatekey_file( + self, keyfile: _StrOrBytesPath, filetype: int = FILETYPE_PEM + ) -> None: + """ + Load a private key from a file + + :param keyfile: The name of the key file (``bytes`` or ``str``) + :param filetype: (optional) The encoding of the file, which is either + :const:`FILETYPE_PEM` or :const:`FILETYPE_ASN1`. The default is + :const:`FILETYPE_PEM`. + + :return: None + """ + keyfile = _path_bytes(keyfile) + + if not isinstance(filetype, int): + raise TypeError("filetype must be an integer") + + use_result = _lib.SSL_CTX_use_PrivateKey_file( + self._context, keyfile, filetype + ) + if not use_result: + self._raise_passphrase_exception() + + @_require_not_used + def use_privatekey(self, pkey: _PrivateKey | PKey) -> None: + """ + Load a private key from a PKey object + + :param pkey: The PKey object + :return: None + """ + # Mirrored at Connection.use_privatekey + if not isinstance(pkey, PKey): + pkey = PKey.from_cryptography_key(pkey) + else: + warnings.warn( + ( + "Passing pyOpenSSL PKey objects is deprecated. You " + "should use a cryptography private key instead." + ), + DeprecationWarning, + stacklevel=2, + ) + + use_result = _lib.SSL_CTX_use_PrivateKey(self._context, pkey._pkey) + if not use_result: + self._raise_passphrase_exception() + + def check_privatekey(self) -> None: + """ + Check if the private key (loaded with :meth:`use_privatekey`) matches + the certificate (loaded with :meth:`use_certificate`) + + :return: :data:`None` (raises :exc:`Error` if something's wrong) + """ + if not _lib.SSL_CTX_check_private_key(self._context): + _raise_current_error() + + @_require_not_used + def load_client_ca(self, cafile: bytes) -> None: + """ + Load the trusted certificates that will be sent to the client. Does + not actually imply any of the certificates are trusted; that must be + configured separately. + + :param bytes cafile: The path to a certificates file in PEM format. + :return: None + """ + ca_list = _lib.SSL_load_client_CA_file( + _text_to_bytes_and_warn("cafile", cafile) + ) + _openssl_assert(ca_list != _ffi.NULL) + _lib.SSL_CTX_set_client_CA_list(self._context, ca_list) + + @_require_not_used + def set_session_id(self, buf: bytes) -> None: + """ + Set the session id to *buf* within which a session can be reused for + this Context object. 
This is needed when doing session resumption, + because there is no way for a stored session to know which Context + object it is associated with. + + :param bytes buf: The session id. + + :returns: None + """ + buf = _text_to_bytes_and_warn("buf", buf) + _openssl_assert( + _lib.SSL_CTX_set_session_id_context(self._context, buf, len(buf)) + == 1 + ) + + @_require_not_used + def set_session_cache_mode(self, mode: int) -> int: + """ + Set the behavior of the session cache used by all connections using + this Context. The previously set mode is returned. See + :const:`SESS_CACHE_*` for details about particular modes. + + :param mode: One or more of the SESS_CACHE_* flags (combine using + bitwise or) + :returns: The previously set caching mode. + + .. versionadded:: 0.14 + """ + if not isinstance(mode, int): + raise TypeError("mode must be an integer") + + return _lib.SSL_CTX_set_session_cache_mode(self._context, mode) + + def get_session_cache_mode(self) -> int: + """ + Get the current session cache mode. + + :returns: The currently used cache mode. + + .. versionadded:: 0.14 + """ + return _lib.SSL_CTX_get_session_cache_mode(self._context) + + @_require_not_used + def set_verify( + self, mode: int, callback: _VerifyCallback | None = None + ) -> None: + """ + Set the verification flags for this Context object to *mode* and + specify that *callback* should be used for verification callbacks. + + :param mode: The verify mode, this should be one of + :const:`VERIFY_NONE` and :const:`VERIFY_PEER`. If + :const:`VERIFY_PEER` is used, *mode* can be OR:ed with + :const:`VERIFY_FAIL_IF_NO_PEER_CERT` and + :const:`VERIFY_CLIENT_ONCE` to further control the behaviour. + :param callback: The optional Python verification callback to use. + This should take five arguments: A Connection object, an X509 + object, and three integer variables, which are in turn potential + error number, error depth and return code. *callback* should + return True if verification passes and False otherwise. + If omitted, OpenSSL's default verification is used. + :return: None + + See SSL_CTX_set_verify(3SSL) for further details. + """ + if not isinstance(mode, int): + raise TypeError("mode must be an integer") + + if callback is None: + self._verify_helper = None + self._verify_callback = None + _lib.SSL_CTX_set_verify(self._context, mode, _ffi.NULL) + else: + if not callable(callback): + raise TypeError("callback must be callable") + + self._verify_helper = _VerifyHelper(callback) + self._verify_callback = self._verify_helper.callback + _lib.SSL_CTX_set_verify(self._context, mode, self._verify_callback) + + @_require_not_used + def set_verify_depth(self, depth: int) -> None: + """ + Set the maximum depth for the certificate chain verification that shall + be allowed for this Context object. + + :param depth: An integer specifying the verify depth + :return: None + """ + if not isinstance(depth, int): + raise TypeError("depth must be an integer") + + _lib.SSL_CTX_set_verify_depth(self._context, depth) + + def get_verify_mode(self) -> int: + """ + Retrieve the Context object's verify mode, as set by + :meth:`set_verify`. + + :return: The verify mode + """ + return _lib.SSL_CTX_get_verify_mode(self._context) + + def get_verify_depth(self) -> int: + """ + Retrieve the Context object's verify depth, as set by + :meth:`set_verify_depth`. 
+
+        :return: The verify depth
+        """
+        return _lib.SSL_CTX_get_verify_depth(self._context)
+
+    @_require_not_used
+    def load_tmp_dh(self, dhfile: _StrOrBytesPath) -> None:
+        """
+        Load parameters for Ephemeral Diffie-Hellman
+
+        :param dhfile: The file to load EDH parameters from (``bytes`` or
+            ``str``).
+
+        :return: None
+        """
+        dhfile = _path_bytes(dhfile)
+
+        bio = _lib.BIO_new_file(dhfile, b"r")
+        if bio == _ffi.NULL:
+            _raise_current_error()
+        bio = _ffi.gc(bio, _lib.BIO_free)
+
+        dh = _lib.PEM_read_bio_DHparams(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL)
+        dh = _ffi.gc(dh, _lib.DH_free)
+        res = _lib.SSL_CTX_set_tmp_dh(self._context, dh)
+        _openssl_assert(res == 1)
+
+    @_require_not_used
+    def set_tmp_ecdh(self, curve: _EllipticCurve | ec.EllipticCurve) -> None:
+        """
+        Select a curve to use for ECDHE key exchange.
+
+        :param curve: A curve instance from cryptography
+            (:class:`~cryptography.hazmat.primitives.asymmetric.ec.EllipticCurve`).
+            Alternatively (deprecated) a curve object from either
+            :meth:`OpenSSL.crypto.get_elliptic_curve` or
+            :meth:`OpenSSL.crypto.get_elliptic_curves`.
+
+        :return: None
+        """
+
+        if isinstance(curve, _EllipticCurve):
+            warnings.warn(
+                (
+                    "Passing pyOpenSSL elliptic curves to set_tmp_ecdh is "
+                    "deprecated. You should use cryptography's elliptic curve "
+                    "types instead."
+                ),
+                DeprecationWarning,
+                stacklevel=2,
+            )
+            _lib.SSL_CTX_set_tmp_ecdh(self._context, curve._to_EC_KEY())
+        else:
+            name = curve.name
+            if name == "secp192r1":
+                name = "prime192v1"
+            elif name == "secp256r1":
+                name = "prime256v1"
+            nid = _lib.OBJ_txt2nid(name.encode())
+            if nid == _lib.NID_undef:
+                _raise_current_error()
+
+            ec = _lib.EC_KEY_new_by_curve_name(nid)
+            _openssl_assert(ec != _ffi.NULL)
+            ec = _ffi.gc(ec, _lib.EC_KEY_free)
+            _lib.SSL_CTX_set_tmp_ecdh(self._context, ec)
+
+    @_require_not_used
+    def set_cipher_list(self, cipher_list: bytes) -> None:
+        """
+        Set the list of ciphers to be used in this context.
+
+        See the OpenSSL manual for more information (e.g.
+        :manpage:`ciphers(1)`).
+
+        Note this API does not change the cipher suites used in TLS 1.3.
+        Use `set_tls13_ciphersuites` for that.
+
+        :param bytes cipher_list: An OpenSSL cipher string.
+        :return: None
+        """
+        cipher_list = _text_to_bytes_and_warn("cipher_list", cipher_list)
+
+        if not isinstance(cipher_list, bytes):
+            raise TypeError("cipher_list must be a byte string.")
+
+        _openssl_assert(
+            _lib.SSL_CTX_set_cipher_list(self._context, cipher_list) == 1
+        )
+
+    @_require_not_used
+    def set_tls13_ciphersuites(self, ciphersuites: bytes) -> None:
+        """
+        Set the list of TLS 1.3 ciphers to be used in this context.
+        OpenSSL maintains a separate list of TLS 1.3+ ciphers from the
+        ciphers for TLS 1.2 and lower.
+
+        See the OpenSSL manual for more information (e.g.
+        :manpage:`ciphers(1)`).
+
+        :param bytes ciphersuites: An OpenSSL cipher string containing
+            TLS 1.3+ ciphersuites.
+        :return: None
+
+        .. versionadded:: 25.2.0
+        """
+        if not isinstance(ciphersuites, bytes):
+            raise TypeError("ciphersuites must be a byte string.")
+
+        _openssl_assert(
+            _lib.SSL_CTX_set_ciphersuites(self._context, ciphersuites) == 1
+        )
+
+    @_require_not_used
+    def set_client_ca_list(
+        self, certificate_authorities: Sequence[X509Name]
+    ) -> None:
+        """
+        Set the list of preferred client certificate signers for this server
+        context.
+
+        This list of certificate authorities will be sent to the client when
+        the server requests a client certificate.
+
+        :param certificate_authorities: a sequence of X509Names.
+        :return: None
+
+        .. versionadded:: 0.10
+        """
+        name_stack = _lib.sk_X509_NAME_new_null()
+        _openssl_assert(name_stack != _ffi.NULL)
+
+        try:
+            for ca_name in certificate_authorities:
+                if not isinstance(ca_name, X509Name):
+                    raise TypeError(
+                        f"client CAs must be X509Name objects, not "
+                        f"{type(ca_name).__name__} objects"
+                    )
+                copy = _lib.X509_NAME_dup(ca_name._name)
+                _openssl_assert(copy != _ffi.NULL)
+                push_result = _lib.sk_X509_NAME_push(name_stack, copy)
+                if not push_result:
+                    _lib.X509_NAME_free(copy)
+                    _raise_current_error()
+        except Exception:
+            _lib.sk_X509_NAME_free(name_stack)
+            raise
+
+        _lib.SSL_CTX_set_client_CA_list(self._context, name_stack)
+
+    @_require_not_used
+    def add_client_ca(
+        self, certificate_authority: X509 | x509.Certificate
+    ) -> None:
+        """
+        Add the CA certificate to the list of preferred signers for this
+        context.
+
+        The list of certificate authorities will be sent to the client when the
+        server requests a client certificate.
+
+        :param certificate_authority: certificate authority's X509 certificate.
+        :return: None
+
+        .. versionadded:: 0.10
+        """
+        if not isinstance(certificate_authority, X509):
+            certificate_authority = X509.from_cryptography(
+                certificate_authority
+            )
+        else:
+            warnings.warn(
+                (
+                    "Passing pyOpenSSL X509 objects is deprecated. You "
+                    "should use a cryptography.x509.Certificate instead."
+                ),
+                DeprecationWarning,
+                stacklevel=2,
+            )
+
+        add_result = _lib.SSL_CTX_add_client_CA(
+            self._context, certificate_authority._x509
+        )
+        _openssl_assert(add_result == 1)
+
+    @_require_not_used
+    def set_timeout(self, timeout: int) -> int:
+        """
+        Set the timeout for newly created sessions for this Context object to
+        *timeout*. The default value is 300 seconds. See the OpenSSL manual
+        for more information (e.g. :manpage:`SSL_CTX_set_timeout(3)`).
+
+        :param timeout: The timeout in (whole) seconds
+        :return: The previous session timeout
+        """
+        if not isinstance(timeout, int):
+            raise TypeError("timeout must be an integer")
+
+        return _lib.SSL_CTX_set_timeout(self._context, timeout)
+
+    def get_timeout(self) -> int:
+        """
+        Retrieve session timeout, as set by :meth:`set_timeout`. The default
+        is 300 seconds.
+
+        :return: The session timeout
+        """
+        return _lib.SSL_CTX_get_timeout(self._context)
+
+    @_require_not_used
+    def set_info_callback(
+        self, callback: Callable[[Connection, int, int], None]
+    ) -> None:
+        """
+        Set the information callback to *callback*. This function will be
+        called from time to time during SSL handshakes.
+
+        :param callback: The Python callback to use. This should take three
+            arguments: a Connection object and two integers. The first integer
+            specifies where in the SSL handshake the function was called, and
+            the other the return code from a (possibly failed) internal
+            function call.
+        :return: None
+        """
+
+        @wraps(callback)
+        def wrapper(ssl, where, return_code):  # type: ignore[no-untyped-def]
+            callback(Connection._reverse_mapping[ssl], where, return_code)
+
+        self._info_callback = _ffi.callback(
+            "void (*)(const SSL *, int, int)", wrapper
+        )
+        _lib.SSL_CTX_set_info_callback(self._context, self._info_callback)
+
+    @_requires_keylog
+    @_require_not_used
+    def set_keylog_callback(
+        self, callback: Callable[[Connection, bytes], None]
+    ) -> None:
+        """
+        Set the TLS key logging callback to *callback*. This function will be
+        called whenever TLS key material is generated or received, in order
+        to allow applications to store this keying material for debugging
+        purposes.
+
+        :param callback: The Python callback to use. 
This should take two + arguments: a Connection object and a bytestring that contains + the key material in the format used by NSS for its SSLKEYLOGFILE + debugging output. + :return: None + """ + + @wraps(callback) + def wrapper(ssl, line): # type: ignore[no-untyped-def] + line = _ffi.string(line) + callback(Connection._reverse_mapping[ssl], line) + + self._keylog_callback = _ffi.callback( + "void (*)(const SSL *, const char *)", wrapper + ) + _lib.SSL_CTX_set_keylog_callback(self._context, self._keylog_callback) + + def get_app_data(self) -> Any: + """ + Get the application data (supplied via :meth:`set_app_data()`) + + :return: The application data + """ + return self._app_data + + @_require_not_used + def set_app_data(self, data: Any) -> None: + """ + Set the application data (will be returned from get_app_data()) + + :param data: Any Python object + :return: None + """ + self._app_data = data + + def get_cert_store(self) -> X509Store | None: + """ + Get the certificate store for the context. This can be used to add + "trusted" certificates without using the + :meth:`load_verify_locations` method. + + :return: A X509Store object or None if it does not have one. + """ + store = _lib.SSL_CTX_get_cert_store(self._context) + if store == _ffi.NULL: + # TODO: This is untested. + return None + + pystore = X509Store.__new__(X509Store) + pystore._store = store + return pystore + + @_require_not_used + def set_options(self, options: int) -> int: + """ + Add options. Options set before are not cleared! + This method should be used with the :const:`OP_*` constants. + + :param options: The options to add. + :return: The new option bitmask. + """ + if not isinstance(options, int): + raise TypeError("options must be an integer") + + return _lib.SSL_CTX_set_options(self._context, options) + + @_require_not_used + def set_mode(self, mode: int) -> int: + """ + Add modes via bitmask. Modes set before are not cleared! This method + should be used with the :const:`MODE_*` constants. + + :param mode: The mode to add. + :return: The new mode bitmask. + """ + if not isinstance(mode, int): + raise TypeError("mode must be an integer") + + return _lib.SSL_CTX_set_mode(self._context, mode) + + @_require_not_used + def clear_mode(self, mode_to_clear: int) -> int: + """ + Modes previously set cannot be overwritten without being + cleared first. This method should be used to clear existing modes. + """ + return _lib.SSL_CTX_clear_mode(self._context, mode_to_clear) + + @_require_not_used + def set_tlsext_servername_callback( + self, callback: Callable[[Connection], None] + ) -> None: + """ + Specify a callback function to be called when clients specify a server + name. + + :param callback: The callback function. It will be invoked with one + argument, the Connection instance. + + .. versionadded:: 0.13 + """ + + @wraps(callback) + def wrapper(ssl, alert, arg): # type: ignore[no-untyped-def] + callback(Connection._reverse_mapping[ssl]) + return 0 + + self._tlsext_servername_callback = _ffi.callback( + "int (*)(SSL *, int *, void *)", wrapper + ) + _lib.SSL_CTX_set_tlsext_servername_callback( + self._context, self._tlsext_servername_callback + ) + + @_require_not_used + def set_tlsext_use_srtp(self, profiles: bytes) -> None: + """ + Enable support for negotiating SRTP keying material. + + :param bytes profiles: A colon delimited list of protection profile + names, like ``b'SRTP_AES128_CM_SHA1_80:SRTP_AES128_CM_SHA1_32'``. 
+ :return: None + """ + if not isinstance(profiles, bytes): + raise TypeError("profiles must be a byte string.") + + _openssl_assert( + _lib.SSL_CTX_set_tlsext_use_srtp(self._context, profiles) == 0 + ) + + @_require_not_used + def set_alpn_protos(self, protos: list[bytes]) -> None: + """ + Specify the protocols that the client is prepared to speak after the + TLS connection has been negotiated using Application Layer Protocol + Negotiation. + + :param protos: A list of the protocols to be offered to the server. + This list should be a Python list of bytestrings representing the + protocols to offer, e.g. ``[b'http/1.1', b'spdy/2']``. + """ + # Different versions of OpenSSL are inconsistent about how they handle + # empty proto lists (see #1043), so we avoid the problem entirely by + # rejecting them ourselves. + if not protos: + raise ValueError("at least one protocol must be specified") + + # Take the list of protocols and join them together, prefixing them + # with their lengths. + protostr = b"".join( + chain.from_iterable((bytes((len(p),)), p) for p in protos) + ) + + # Build a C string from the list. We don't need to save this off + # because OpenSSL immediately copies the data out. + input_str = _ffi.new("unsigned char[]", protostr) + + # https://www.openssl.org/docs/man1.1.0/man3/SSL_CTX_set_alpn_protos.html: + # SSL_CTX_set_alpn_protos() and SSL_set_alpn_protos() + # return 0 on success, and non-0 on failure. + # WARNING: these functions reverse the return value convention. + _openssl_assert( + _lib.SSL_CTX_set_alpn_protos( + self._context, input_str, len(protostr) + ) + == 0 + ) + + @_require_not_used + def set_alpn_select_callback(self, callback: _ALPNSelectCallback) -> None: + """ + Specify a callback function that will be called on the server when a + client offers protocols using ALPN. + + :param callback: The callback function. It will be invoked with two + arguments: the Connection, and a list of offered protocols as + bytestrings, e.g ``[b'http/1.1', b'spdy/2']``. It can return + one of those bytestrings to indicate the chosen protocol, the + empty bytestring to terminate the TLS connection, or the + :py:obj:`NO_OVERLAPPING_PROTOCOLS` to indicate that no offered + protocol was selected, but that the connection should not be + aborted. + """ + self._alpn_select_helper = _ALPNSelectHelper(callback) + self._alpn_select_callback = self._alpn_select_helper.callback + _lib.SSL_CTX_set_alpn_select_cb( + self._context, self._alpn_select_callback, _ffi.NULL + ) + + def _set_ocsp_callback( + self, + helper: _OCSPClientCallbackHelper | _OCSPServerCallbackHelper, + data: Any | None, + ) -> None: + """ + This internal helper does the common work for + ``set_ocsp_server_callback`` and ``set_ocsp_client_callback``, which is + almost all of it. + """ + self._ocsp_helper = helper + self._ocsp_callback = helper.callback + if data is None: + self._ocsp_data = _ffi.NULL + else: + self._ocsp_data = _ffi.new_handle(data) + + rc = _lib.SSL_CTX_set_tlsext_status_cb( + self._context, self._ocsp_callback + ) + _openssl_assert(rc == 1) + rc = _lib.SSL_CTX_set_tlsext_status_arg(self._context, self._ocsp_data) + _openssl_assert(rc == 1) + + @_require_not_used + def set_ocsp_server_callback( + self, + callback: _OCSPServerCallback[_T], + data: _T | None = None, + ) -> None: + """ + Set a callback to provide OCSP data to be stapled to the TLS handshake + on the server side. + + :param callback: The callback function. 
It will be invoked with two + arguments: the Connection, and the optional arbitrary data you have + provided. The callback must return a bytestring that contains the + OCSP data to staple to the handshake. If no OCSP data is available + for this connection, return the empty bytestring. + :param data: Some opaque data that will be passed into the callback + function when called. This can be used to avoid needing to do + complex data lookups or to keep track of what context is being + used. This parameter is optional. + """ + helper = _OCSPServerCallbackHelper(callback) + self._set_ocsp_callback(helper, data) + + @_require_not_used + def set_ocsp_client_callback( + self, + callback: _OCSPClientCallback[_T], + data: _T | None = None, + ) -> None: + """ + Set a callback to validate OCSP data stapled to the TLS handshake on + the client side. + + :param callback: The callback function. It will be invoked with three + arguments: the Connection, a bytestring containing the stapled OCSP + assertion, and the optional arbitrary data you have provided. The + callback must return a boolean that indicates the result of + validating the OCSP data: ``True`` if the OCSP data is valid and + the certificate can be trusted, or ``False`` if either the OCSP + data is invalid or the certificate has been revoked. + :param data: Some opaque data that will be passed into the callback + function when called. This can be used to avoid needing to do + complex data lookups or to keep track of what context is being + used. This parameter is optional. + """ + helper = _OCSPClientCallbackHelper(callback) + self._set_ocsp_callback(helper, data) + + @_require_not_used + def set_cookie_generate_callback( + self, callback: _CookieGenerateCallback + ) -> None: + self._cookie_generate_helper = _CookieGenerateCallbackHelper(callback) + _lib.SSL_CTX_set_cookie_generate_cb( + self._context, + self._cookie_generate_helper.callback, + ) + + @_require_not_used + def set_cookie_verify_callback( + self, callback: _CookieVerifyCallback + ) -> None: + self._cookie_verify_helper = _CookieVerifyCallbackHelper(callback) + _lib.SSL_CTX_set_cookie_verify_cb( + self._context, + self._cookie_verify_helper.callback, + ) + + +class Connection: + _reverse_mapping: typing.MutableMapping[Any, Connection] = ( + WeakValueDictionary() + ) + + def __init__( + self, context: Context, socket: socket.socket | None = None + ) -> None: + """ + Create a new Connection object, using the given OpenSSL.SSL.Context + instance and socket. + + :param context: An SSL Context to use for this connection + :param socket: The socket to use for transport layer + """ + if not isinstance(context, Context): + raise TypeError("context must be a Context instance") + + context._used = True + + ssl = _lib.SSL_new(context._context) + self._ssl = _ffi.gc(ssl, _lib.SSL_free) + # We set SSL_MODE_AUTO_RETRY to handle situations where OpenSSL returns + # an SSL_ERROR_WANT_READ when processing a non-application data packet + # even though there is still data on the underlying transport. + # See https://github.com/openssl/openssl/issues/6234 for more details. + _lib.SSL_set_mode(self._ssl, _lib.SSL_MODE_AUTO_RETRY) + self._context = context + self._app_data = None + + # References to strings used for Application Layer Protocol + # Negotiation. These strings get copied at some point but it's well + # after the callback returns, so we have to hang them somewhere to + # avoid them getting freed. 
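+        # (Holding the reference on this Connection is the usual CFFI
+        # idiom for pinning memory that C code may still read after the
+        # Python call has returned.)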
+ self._alpn_select_callback_args: Any = None + + # Reference the verify_callback of the Context. This ensures that if + # set_verify is called again after the SSL object has been created we + # do not point to a dangling reference + self._verify_helper = context._verify_helper + self._verify_callback = context._verify_callback + + # And likewise for the cookie callbacks + self._cookie_generate_helper = context._cookie_generate_helper + self._cookie_verify_helper = context._cookie_verify_helper + + self._reverse_mapping[self._ssl] = self + + if socket is None: + self._socket = None + # Don't set up any gc for these, SSL_free will take care of them. + self._into_ssl = _lib.BIO_new(_lib.BIO_s_mem()) + _openssl_assert(self._into_ssl != _ffi.NULL) + + self._from_ssl = _lib.BIO_new(_lib.BIO_s_mem()) + _openssl_assert(self._from_ssl != _ffi.NULL) + + _lib.SSL_set_bio(self._ssl, self._into_ssl, self._from_ssl) + else: + self._into_ssl = None + self._from_ssl = None + self._socket = socket + set_result = _lib.SSL_set_fd( + self._ssl, _asFileDescriptor(self._socket) + ) + _openssl_assert(set_result == 1) + + def __getattr__(self, name: str) -> Any: + """ + Look up attributes on the wrapped socket object if they are not found + on the Connection object. + """ + if self._socket is None: + raise AttributeError( + f"'{self.__class__.__name__}' object has no attribute '{name}'" + ) + else: + return getattr(self._socket, name) + + def _raise_ssl_error(self, ssl: Any, result: int) -> None: + if self._context._verify_helper is not None: + self._context._verify_helper.raise_if_problem() + if self._context._alpn_select_helper is not None: + self._context._alpn_select_helper.raise_if_problem() + if self._context._ocsp_helper is not None: + self._context._ocsp_helper.raise_if_problem() + + error = _lib.SSL_get_error(ssl, result) + if error == _lib.SSL_ERROR_WANT_READ: + raise WantReadError() + elif error == _lib.SSL_ERROR_WANT_WRITE: + raise WantWriteError() + elif error == _lib.SSL_ERROR_ZERO_RETURN: + raise ZeroReturnError() + elif error == _lib.SSL_ERROR_WANT_X509_LOOKUP: + # TODO: This is untested. + raise WantX509LookupError() + elif error == _lib.SSL_ERROR_SYSCALL: + if platform == "win32": + errno = _ffi.getwinerror()[0] + else: + errno = _ffi.errno + if _lib.ERR_peek_error() == 0 or errno != 0: + if result < 0 and errno != 0: + raise SysCallError(errno, errorcode.get(errno)) + raise SysCallError(-1, "Unexpected EOF") + else: + # TODO: This is untested, but I think twisted hits it? + _raise_current_error() + elif error == _lib.SSL_ERROR_SSL and _lib.ERR_peek_error() != 0: + # In 3.0.x an unexpected EOF no longer triggers syscall error + # but we want to maintain compatibility so we check here and + # raise syscall if it is an EOF. 
Since we're not actually sure + # what else could raise SSL_ERROR_SSL we check for the presence + # of the OpenSSL 3 constant SSL_R_UNEXPECTED_EOF_WHILE_READING + # and if it's not present we just raise an error, which matches + # the behavior before we added this elif section + peeked_error = _lib.ERR_peek_error() + reason = _lib.ERR_GET_REASON(peeked_error) + if _lib.Cryptography_HAS_UNEXPECTED_EOF_WHILE_READING: + _openssl_assert( + reason == _lib.SSL_R_UNEXPECTED_EOF_WHILE_READING + ) + _lib.ERR_clear_error() + raise SysCallError(-1, "Unexpected EOF") + else: + _raise_current_error() + elif error == _lib.SSL_ERROR_NONE: + pass + else: + _raise_current_error() + + def get_context(self) -> Context: + """ + Retrieve the :class:`Context` object associated with this + :class:`Connection`. + """ + return self._context + + def set_context(self, context: Context) -> None: + """ + Switch this connection to a new session context. + + :param context: A :class:`Context` instance giving the new session + context to use. + """ + if not isinstance(context, Context): + raise TypeError("context must be a Context instance") + + _lib.SSL_set_SSL_CTX(self._ssl, context._context) + self._context = context + self._context._used = True + + def get_servername(self) -> bytes | None: + """ + Retrieve the servername extension value if provided in the client hello + message, or None if there wasn't one. + + :return: A byte string giving the server name or :data:`None`. + + .. versionadded:: 0.13 + """ + name = _lib.SSL_get_servername( + self._ssl, _lib.TLSEXT_NAMETYPE_host_name + ) + if name == _ffi.NULL: + return None + + return _ffi.string(name) + + def set_verify( + self, mode: int, callback: _VerifyCallback | None = None + ) -> None: + """ + Override the Context object's verification flags for this specific + connection. See :py:meth:`Context.set_verify` for details. + """ + if not isinstance(mode, int): + raise TypeError("mode must be an integer") + + if callback is None: + self._verify_helper = None + self._verify_callback = None + _lib.SSL_set_verify(self._ssl, mode, _ffi.NULL) + else: + if not callable(callback): + raise TypeError("callback must be callable") + + self._verify_helper = _VerifyHelper(callback) + self._verify_callback = self._verify_helper.callback + _lib.SSL_set_verify(self._ssl, mode, self._verify_callback) + + def get_verify_mode(self) -> int: + """ + Retrieve the Connection object's verify mode, as set by + :meth:`set_verify`. + + :return: The verify mode + """ + return _lib.SSL_get_verify_mode(self._ssl) + + def use_certificate(self, cert: X509 | x509.Certificate) -> None: + """ + Load a certificate from a X509 object + + :param cert: The X509 object + :return: None + """ + # Mirrored from Context.use_certificate + if not isinstance(cert, X509): + cert = X509.from_cryptography(cert) + else: + warnings.warn( + ( + "Passing pyOpenSSL X509 objects is deprecated. You " + "should use a cryptography.x509.Certificate instead." + ), + DeprecationWarning, + stacklevel=2, + ) + + use_result = _lib.SSL_use_certificate(self._ssl, cert._x509) + if not use_result: + _raise_current_error() + + def use_privatekey(self, pkey: _PrivateKey | PKey) -> None: + """ + Load a private key from a PKey object + + :param pkey: The PKey object + :return: None + """ + # Mirrored from Context.use_privatekey + if not isinstance(pkey, PKey): + pkey = PKey.from_cryptography_key(pkey) + else: + warnings.warn( + ( + "Passing pyOpenSSL PKey objects is deprecated. You " + "should use a cryptography private key instead." 
+ ), + DeprecationWarning, + stacklevel=2, + ) + + use_result = _lib.SSL_use_PrivateKey(self._ssl, pkey._pkey) + if not use_result: + self._context._raise_passphrase_exception() + + def set_ciphertext_mtu(self, mtu: int) -> None: + """ + For DTLS, set the maximum UDP payload size (*not* including IP/UDP + overhead). + + Note that you might have to set :data:`OP_NO_QUERY_MTU` to prevent + OpenSSL from spontaneously clearing this. + + :param mtu: An integer giving the maximum transmission unit. + + .. versionadded:: 21.1 + """ + _lib.SSL_set_mtu(self._ssl, mtu) + + def get_cleartext_mtu(self) -> int: + """ + For DTLS, get the maximum size of unencrypted data you can pass to + :meth:`write` without exceeding the MTU (as passed to + :meth:`set_ciphertext_mtu`). + + :return: The effective MTU as an integer. + + .. versionadded:: 21.1 + """ + + if not hasattr(_lib, "DTLS_get_data_mtu"): + raise NotImplementedError("requires OpenSSL 1.1.1 or better") + return _lib.DTLS_get_data_mtu(self._ssl) + + def set_tlsext_host_name(self, name: bytes) -> None: + """ + Set the value of the servername extension to send in the client hello. + + :param name: A byte string giving the name. + + .. versionadded:: 0.13 + """ + if not isinstance(name, bytes): + raise TypeError("name must be a byte string") + elif b"\0" in name: + raise TypeError("name must not contain NUL byte") + + # XXX I guess this can fail sometimes? + _lib.SSL_set_tlsext_host_name(self._ssl, name) + + def pending(self) -> int: + """ + Get the number of bytes that can be safely read from the SSL buffer + (**not** the underlying transport buffer). + + :return: The number of bytes available in the receive buffer. + """ + return _lib.SSL_pending(self._ssl) + + def send(self, buf: _Buffer, flags: int = 0) -> int: + """ + Send data on the connection. NOTE: If you get one of the WantRead, + WantWrite or WantX509Lookup exceptions on this, you have to call the + method again with the SAME buffer. + + :param buf: The string, buffer or memoryview to send + :param flags: (optional) Included for compatibility with the socket + API, the value is ignored + :return: The number of bytes written + """ + # Backward compatibility + buf = _text_to_bytes_and_warn("buf", buf) + + with _ffi.from_buffer(buf) as data: + # check len(buf) instead of len(data) for testability + if len(buf) > 2147483647: + raise ValueError( + "Cannot send more than 2**31-1 bytes at once." + ) + + result = _lib.SSL_write(self._ssl, data, len(data)) + self._raise_ssl_error(self._ssl, result) + + return result + + write = send + + def sendall(self, buf: _Buffer, flags: int = 0) -> int: + """ + Send "all" data on the connection. This calls send() repeatedly until + all data is sent. If an error occurs, it's impossible to tell how much + data has been sent. + + :param buf: The string, buffer or memoryview to send + :param flags: (optional) Included for compatibility with the socket + API, the value is ignored + :return: The number of bytes written + """ + buf = _text_to_bytes_and_warn("buf", buf) + + with _ffi.from_buffer(buf) as data: + left_to_send = len(buf) + total_sent = 0 + + while left_to_send: + # SSL_write's num arg is an int, + # so we cannot send more than 2**31-1 bytes at once. 
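+                # Each pass advances the data pointer by what was
+                # already sent and caps the chunk at 2**31-1 bytes, so
+                # buffers of any size are handled.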
+ result = _lib.SSL_write( + self._ssl, data + total_sent, min(left_to_send, 2147483647) + ) + self._raise_ssl_error(self._ssl, result) + total_sent += result + left_to_send -= result + + return total_sent + + def recv(self, bufsiz: int, flags: int | None = None) -> bytes: + """ + Receive data on the connection. + + :param bufsiz: The maximum number of bytes to read + :param flags: (optional) The only supported flag is ``MSG_PEEK``, + all other flags are ignored. + :return: The string read from the Connection + """ + buf = _no_zero_allocator("char[]", bufsiz) + if flags is not None and flags & socket.MSG_PEEK: + result = _lib.SSL_peek(self._ssl, buf, bufsiz) + else: + result = _lib.SSL_read(self._ssl, buf, bufsiz) + self._raise_ssl_error(self._ssl, result) + return _ffi.buffer(buf, result)[:] + + read = recv + + def recv_into( + self, + buffer: Any, # collections.abc.Buffer once we use Python 3.12+ + nbytes: int | None = None, + flags: int | None = None, + ) -> int: + """ + Receive data on the connection and copy it directly into the provided + buffer, rather than creating a new string. + + :param buffer: The buffer to copy into. + :param nbytes: (optional) The maximum number of bytes to read into the + buffer. If not present, defaults to the size of the buffer. If + larger than the size of the buffer, is reduced to the size of the + buffer. + :param flags: (optional) The only supported flag is ``MSG_PEEK``, + all other flags are ignored. + :return: The number of bytes read into the buffer. + """ + if nbytes is None: + nbytes = len(buffer) + else: + nbytes = min(nbytes, len(buffer)) + + # We need to create a temporary buffer. This is annoying, it would be + # better if we could pass memoryviews straight into the SSL_read call, + # but right now we can't. Revisit this if CFFI gets that ability. + buf = _no_zero_allocator("char[]", nbytes) + if flags is not None and flags & socket.MSG_PEEK: + result = _lib.SSL_peek(self._ssl, buf, nbytes) + else: + result = _lib.SSL_read(self._ssl, buf, nbytes) + self._raise_ssl_error(self._ssl, result) + + # This strange line is all to avoid a memory copy. The buffer protocol + # should allow us to assign a CFFI buffer to the LHS of this line, but + # on CPython 3.3+ that segfaults. As a workaround, we can temporarily + # wrap it in a memoryview. + buffer[:result] = memoryview(_ffi.buffer(buf, result)) + + return result + + def _handle_bio_errors(self, bio: Any, result: int) -> typing.NoReturn: + if _lib.BIO_should_retry(bio): + if _lib.BIO_should_read(bio): + raise WantReadError() + elif _lib.BIO_should_write(bio): + # TODO: This is untested. + raise WantWriteError() + elif _lib.BIO_should_io_special(bio): + # TODO: This is untested. I think io_special means the socket + # BIO has a not-yet connected socket. + raise ValueError("BIO_should_io_special") + else: + # TODO: This is untested. + raise ValueError("unknown bio failure") + else: + # TODO: This is untested. + _raise_current_error() + + def bio_read(self, bufsiz: int) -> bytes: + """ + If the Connection was created with a memory BIO, this method can be + used to read bytes from the write end of that memory BIO. Many + Connection methods will add bytes which must be read in this manner or + the buffer will eventually fill up and the Connection will be able to + take no further actions. + + :param bufsiz: The maximum number of bytes to read + :return: The string read. 
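+
+        For example, a sketch of pumping handshake bytes out of a
+        memory-BIO connection (assuming ``ctx`` is a configured
+        :class:`Context`)::
+
+            conn = Connection(ctx, None)
+            conn.set_connect_state()
+            try:
+                conn.do_handshake()
+            except WantReadError:
+                outgoing = conn.bio_read(4096)  # ship these to the peer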
+ """ + if self._from_ssl is None: + raise TypeError("Connection sock was not None") + + if not isinstance(bufsiz, int): + raise TypeError("bufsiz must be an integer") + + buf = _no_zero_allocator("char[]", bufsiz) + result = _lib.BIO_read(self._from_ssl, buf, bufsiz) + if result <= 0: + self._handle_bio_errors(self._from_ssl, result) + + return _ffi.buffer(buf, result)[:] + + def bio_write(self, buf: _Buffer) -> int: + """ + If the Connection was created with a memory BIO, this method can be + used to add bytes to the read end of that memory BIO. The Connection + can then read the bytes (for example, in response to a call to + :meth:`recv`). + + :param buf: The string to put into the memory BIO. + :return: The number of bytes written + """ + buf = _text_to_bytes_and_warn("buf", buf) + + if self._into_ssl is None: + raise TypeError("Connection sock was not None") + + with _ffi.from_buffer(buf) as data: + result = _lib.BIO_write(self._into_ssl, data, len(data)) + if result <= 0: + self._handle_bio_errors(self._into_ssl, result) + return result + + def renegotiate(self) -> bool: + """ + Renegotiate the session. + + :return: True if the renegotiation can be started, False otherwise + """ + if not self.renegotiate_pending(): + _openssl_assert(_lib.SSL_renegotiate(self._ssl) == 1) + return True + return False + + def do_handshake(self) -> None: + """ + Perform an SSL handshake (usually called after :meth:`renegotiate` or + one of :meth:`set_accept_state` or :meth:`set_connect_state`). This can + raise the same exceptions as :meth:`send` and :meth:`recv`. + + :return: None. + """ + result = _lib.SSL_do_handshake(self._ssl) + self._raise_ssl_error(self._ssl, result) + + def renegotiate_pending(self) -> bool: + """ + Check if there's a renegotiation in progress, it will return False once + a renegotiation is finished. + + :return: Whether there's a renegotiation in progress + """ + return _lib.SSL_renegotiate_pending(self._ssl) == 1 + + def total_renegotiations(self) -> int: + """ + Find out the total number of renegotiations. + + :return: The number of renegotiations. + """ + return _lib.SSL_total_renegotiations(self._ssl) + + def connect(self, addr: Any) -> None: + """ + Call the :meth:`connect` method of the underlying socket and set up SSL + on the socket, using the :class:`Context` object supplied to this + :class:`Connection` object at creation. + + :param addr: A remote address + :return: What the socket's connect method returns + """ + _lib.SSL_set_connect_state(self._ssl) + return self._socket.connect(addr) # type: ignore[return-value, union-attr] + + def connect_ex(self, addr: Any) -> int: + """ + Call the :meth:`connect_ex` method of the underlying socket and set up + SSL on the socket, using the Context object supplied to this Connection + object at creation. Note that if the :meth:`connect_ex` method of the + socket doesn't return 0, SSL won't be initialized. + + :param addr: A remove address + :return: What the socket's connect_ex method returns + """ + connect_ex = self._socket.connect_ex # type: ignore[union-attr] + self.set_connect_state() + return connect_ex(addr) + + def accept(self) -> tuple[Connection, Any]: + """ + Call the :meth:`accept` method of the underlying socket and set up SSL + on the returned socket, using the Context object supplied to this + :class:`Connection` object at creation. + + :return: A *(conn, addr)* pair where *conn* is the new + :class:`Connection` object created, and *address* is as returned by + the socket's :meth:`accept`. 
+ """ + client, addr = self._socket.accept() # type: ignore[union-attr] + conn = Connection(self._context, client) + conn.set_accept_state() + return (conn, addr) + + def DTLSv1_listen(self) -> None: + """ + Call the OpenSSL function DTLSv1_listen on this connection. See the + OpenSSL manual for more details. + + :return: None + """ + # Possible future extension: return the BIO_ADDR in some form. + bio_addr = _lib.BIO_ADDR_new() + try: + result = _lib.DTLSv1_listen(self._ssl, bio_addr) + finally: + _lib.BIO_ADDR_free(bio_addr) + # DTLSv1_listen is weird. A zero return value means 'didn't find a + # ClientHello with valid cookie, but keep trying'. So basically + # WantReadError. But it doesn't work correctly with _raise_ssl_error. + # So we raise it manually instead. + if self._cookie_generate_helper is not None: + self._cookie_generate_helper.raise_if_problem() + if self._cookie_verify_helper is not None: + self._cookie_verify_helper.raise_if_problem() + if result == 0: + raise WantReadError() + if result < 0: + self._raise_ssl_error(self._ssl, result) + + def DTLSv1_get_timeout(self) -> int | None: + """ + Determine when the DTLS SSL object next needs to perform internal + processing due to the passage of time. + + When the returned number of seconds have passed, the + :meth:`DTLSv1_handle_timeout` method needs to be called. + + :return: The time left in seconds before the next timeout or `None` + if no timeout is currently active. + """ + ptv_sec = _ffi.new("time_t *") + ptv_usec = _ffi.new("long *") + if _lib.Cryptography_DTLSv1_get_timeout(self._ssl, ptv_sec, ptv_usec): + return ptv_sec[0] + (ptv_usec[0] / 1000000) + else: + return None + + def DTLSv1_handle_timeout(self) -> bool: + """ + Handles any timeout events which have become pending on a DTLS SSL + object. + + :return: `True` if there was a pending timeout, `False` otherwise. + """ + result = _lib.DTLSv1_handle_timeout(self._ssl) + if result < 0: + self._raise_ssl_error(self._ssl, result) + assert False, "unreachable" + else: + return bool(result) + + def bio_shutdown(self) -> None: + """ + If the Connection was created with a memory BIO, this method can be + used to indicate that *end of file* has been reached on the read end of + that memory BIO. + + :return: None + """ + if self._from_ssl is None: + raise TypeError("Connection sock was not None") + + _lib.BIO_set_mem_eof_return(self._into_ssl, 0) + + def shutdown(self) -> bool: + """ + Send the shutdown message to the Connection. + + :return: True if the shutdown completed successfully (i.e. both sides + have sent closure alerts), False otherwise (in which case you + call :meth:`recv` or :meth:`send` when the connection becomes + readable/writeable). + """ + result = _lib.SSL_shutdown(self._ssl) + if result < 0: + self._raise_ssl_error(self._ssl, result) + assert False, "unreachable" + elif result > 0: + return True + else: + return False + + def get_cipher_list(self) -> list[str]: + """ + Retrieve the list of ciphers used by the Connection object. + + :return: A list of native cipher strings. + """ + ciphers = [] + for i in count(): + result = _lib.SSL_get_cipher_list(self._ssl, i) + if result == _ffi.NULL: + break + ciphers.append(_ffi.string(result).decode("utf-8")) + return ciphers + + def get_client_ca_list(self) -> list[X509Name]: + """ + Get CAs whose certificates are suggested for client authentication. 
+ + :return: If this is a server connection, the list of certificate + authorities that will be sent or has been sent to the client, as + controlled by this :class:`Connection`'s :class:`Context`. + + If this is a client connection, the list will be empty until the + connection with the server is established. + + .. versionadded:: 0.10 + """ + ca_names = _lib.SSL_get_client_CA_list(self._ssl) + if ca_names == _ffi.NULL: + # TODO: This is untested. + return [] + + result = [] + for i in range(_lib.sk_X509_NAME_num(ca_names)): + name = _lib.sk_X509_NAME_value(ca_names, i) + copy = _lib.X509_NAME_dup(name) + _openssl_assert(copy != _ffi.NULL) + + pyname = X509Name.__new__(X509Name) + pyname._name = _ffi.gc(copy, _lib.X509_NAME_free) + result.append(pyname) + return result + + def makefile(self, *args: Any, **kwargs: Any) -> typing.NoReturn: + """ + The makefile() method is not implemented, since there is no dup + semantics for SSL connections + + :raise: NotImplementedError + """ + raise NotImplementedError( + "Cannot make file object of OpenSSL.SSL.Connection" + ) + + def get_app_data(self) -> Any: + """ + Retrieve application data as set by :meth:`set_app_data`. + + :return: The application data + """ + return self._app_data + + def set_app_data(self, data: Any) -> None: + """ + Set application data + + :param data: The application data + :return: None + """ + self._app_data = data + + def get_shutdown(self) -> int: + """ + Get the shutdown state of the Connection. + + :return: The shutdown state, a bitvector of SENT_SHUTDOWN, + RECEIVED_SHUTDOWN. + """ + return _lib.SSL_get_shutdown(self._ssl) + + def set_shutdown(self, state: int) -> None: + """ + Set the shutdown state of the Connection. + + :param state: bitvector of SENT_SHUTDOWN, RECEIVED_SHUTDOWN. + :return: None + """ + if not isinstance(state, int): + raise TypeError("state must be an integer") + + _lib.SSL_set_shutdown(self._ssl, state) + + def get_state_string(self) -> bytes: + """ + Retrieve a verbose string detailing the state of the Connection. + + :return: A string representing the state + """ + return _ffi.string(_lib.SSL_state_string_long(self._ssl)) + + def server_random(self) -> bytes | None: + """ + Retrieve the random value used with the server hello message. + + :return: A string representing the state + """ + session = _lib.SSL_get_session(self._ssl) + if session == _ffi.NULL: + return None + length = _lib.SSL_get_server_random(self._ssl, _ffi.NULL, 0) + _openssl_assert(length > 0) + outp = _no_zero_allocator("unsigned char[]", length) + _lib.SSL_get_server_random(self._ssl, outp, length) + return _ffi.buffer(outp, length)[:] + + def client_random(self) -> bytes | None: + """ + Retrieve the random value used with the client hello message. + + :return: A string representing the state + """ + session = _lib.SSL_get_session(self._ssl) + if session == _ffi.NULL: + return None + + length = _lib.SSL_get_client_random(self._ssl, _ffi.NULL, 0) + _openssl_assert(length > 0) + outp = _no_zero_allocator("unsigned char[]", length) + _lib.SSL_get_client_random(self._ssl, outp, length) + return _ffi.buffer(outp, length)[:] + + def master_key(self) -> bytes | None: + """ + Retrieve the value of the master key for this session. 
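+
+        For example, together with :meth:`client_random` this is enough
+        to build an NSS-style key log line for TLS 1.2 and earlier (a
+        sketch, assuming an established connection ``conn``; both
+        methods return :data:`None` when no session exists)::
+
+            line = "CLIENT_RANDOM {} {}".format(
+                conn.client_random().hex(), conn.master_key().hex())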
+ + :return: A string representing the state + """ + session = _lib.SSL_get_session(self._ssl) + if session == _ffi.NULL: + return None + + length = _lib.SSL_SESSION_get_master_key(session, _ffi.NULL, 0) + _openssl_assert(length > 0) + outp = _no_zero_allocator("unsigned char[]", length) + _lib.SSL_SESSION_get_master_key(session, outp, length) + return _ffi.buffer(outp, length)[:] + + def export_keying_material( + self, label: bytes, olen: int, context: bytes | None = None + ) -> bytes: + """ + Obtain keying material for application use. + + :param: label - a disambiguating label string as described in RFC 5705 + :param: olen - the length of the exported key material in bytes + :param: context - a per-association context value + :return: the exported key material bytes or None + """ + outp = _no_zero_allocator("unsigned char[]", olen) + context_buf = _ffi.NULL + context_len = 0 + use_context = 0 + if context is not None: + context_buf = context + context_len = len(context) + use_context = 1 + success = _lib.SSL_export_keying_material( + self._ssl, + outp, + olen, + label, + len(label), + context_buf, + context_len, + use_context, + ) + _openssl_assert(success == 1) + return _ffi.buffer(outp, olen)[:] + + def sock_shutdown(self, *args: Any, **kwargs: Any) -> None: + """ + Call the :meth:`shutdown` method of the underlying socket. + See :manpage:`shutdown(2)`. + + :return: What the socket's shutdown() method returns + """ + return self._socket.shutdown(*args, **kwargs) # type: ignore[return-value, union-attr] + + @typing.overload + def get_certificate( + self, *, as_cryptography: typing.Literal[True] + ) -> x509.Certificate | None: + pass + + @typing.overload + def get_certificate( + self, *, as_cryptography: typing.Literal[False] = False + ) -> X509 | None: + pass + + def get_certificate( + self, + *, + as_cryptography: typing.Literal[True] | typing.Literal[False] = False, + ) -> X509 | x509.Certificate | None: + """ + Retrieve the local certificate (if any) + + :param bool as_cryptography: Controls whether a + ``cryptography.x509.Certificate`` or an ``OpenSSL.crypto.X509`` + object should be returned. + + :return: The local certificate + """ + cert = _lib.SSL_get_certificate(self._ssl) + if cert != _ffi.NULL: + _lib.X509_up_ref(cert) + pycert = X509._from_raw_x509_ptr(cert) + if as_cryptography: + return pycert.to_cryptography() + return pycert + return None + + @typing.overload + def get_peer_certificate( + self, *, as_cryptography: typing.Literal[True] + ) -> x509.Certificate | None: + pass + + @typing.overload + def get_peer_certificate( + self, *, as_cryptography: typing.Literal[False] = False + ) -> X509 | None: + pass + + def get_peer_certificate( + self, + *, + as_cryptography: typing.Literal[True] | typing.Literal[False] = False, + ) -> X509 | x509.Certificate | None: + """ + Retrieve the other side's certificate (if any) + + :param bool as_cryptography: Controls whether a + ``cryptography.x509.Certificate`` or an ``OpenSSL.crypto.X509`` + object should be returned. + + :return: The peer's certificate + """ + cert = _lib.SSL_get_peer_certificate(self._ssl) + if cert != _ffi.NULL: + pycert = X509._from_raw_x509_ptr(cert) + if as_cryptography: + return pycert.to_cryptography() + return pycert + return None + + @staticmethod + def _cert_stack_to_list(cert_stack: Any) -> list[X509]: + """ + Internal helper to convert a STACK_OF(X509) to a list of X509 + instances. 
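+
+        Each entry's reference count is bumped with ``X509_up_ref``
+        because the stack retains ownership of its members; the returned
+        Python objects then manage their own references independently.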
+ """ + result = [] + for i in range(_lib.sk_X509_num(cert_stack)): + cert = _lib.sk_X509_value(cert_stack, i) + _openssl_assert(cert != _ffi.NULL) + res = _lib.X509_up_ref(cert) + _openssl_assert(res >= 1) + pycert = X509._from_raw_x509_ptr(cert) + result.append(pycert) + return result + + @staticmethod + def _cert_stack_to_cryptography_list( + cert_stack: Any, + ) -> list[x509.Certificate]: + """ + Internal helper to convert a STACK_OF(X509) to a list of X509 + instances. + """ + result = [] + for i in range(_lib.sk_X509_num(cert_stack)): + cert = _lib.sk_X509_value(cert_stack, i) + _openssl_assert(cert != _ffi.NULL) + res = _lib.X509_up_ref(cert) + _openssl_assert(res >= 1) + pycert = X509._from_raw_x509_ptr(cert) + result.append(pycert.to_cryptography()) + return result + + @typing.overload + def get_peer_cert_chain( + self, *, as_cryptography: typing.Literal[True] + ) -> list[x509.Certificate] | None: + pass + + @typing.overload + def get_peer_cert_chain( + self, *, as_cryptography: typing.Literal[False] = False + ) -> list[X509] | None: + pass + + def get_peer_cert_chain( + self, + *, + as_cryptography: typing.Literal[True] | typing.Literal[False] = False, + ) -> list[X509] | list[x509.Certificate] | None: + """ + Retrieve the other side's certificate (if any) + + :param bool as_cryptography: Controls whether a list of + ``cryptography.x509.Certificate`` or ``OpenSSL.crypto.X509`` + object should be returned. + + :return: A list of X509 instances giving the peer's certificate chain, + or None if it does not have one. + """ + cert_stack = _lib.SSL_get_peer_cert_chain(self._ssl) + if cert_stack == _ffi.NULL: + return None + + if as_cryptography: + return self._cert_stack_to_cryptography_list(cert_stack) + return self._cert_stack_to_list(cert_stack) + + @typing.overload + def get_verified_chain( + self, *, as_cryptography: typing.Literal[True] + ) -> list[x509.Certificate] | None: + pass + + @typing.overload + def get_verified_chain( + self, *, as_cryptography: typing.Literal[False] = False + ) -> list[X509] | None: + pass + + def get_verified_chain( + self, + *, + as_cryptography: typing.Literal[True] | typing.Literal[False] = False, + ) -> list[X509] | list[x509.Certificate] | None: + """ + Retrieve the verified certificate chain of the peer including the + peer's end entity certificate. It must be called after a session has + been successfully established. If peer verification was not successful + the chain may be incomplete, invalid, or None. + + :param bool as_cryptography: Controls whether a list of + ``cryptography.x509.Certificate`` or ``OpenSSL.crypto.X509`` + object should be returned. + + :return: A list of X509 instances giving the peer's verified + certificate chain, or None if it does not have one. + + .. versionadded:: 20.0 + """ + # OpenSSL 1.1+ + cert_stack = _lib.SSL_get0_verified_chain(self._ssl) + if cert_stack == _ffi.NULL: + return None + + if as_cryptography: + return self._cert_stack_to_cryptography_list(cert_stack) + return self._cert_stack_to_list(cert_stack) + + def want_read(self) -> bool: + """ + Checks if more data has to be read from the transport layer to complete + an operation. + + :return: True iff more data has to be read + """ + return _lib.SSL_want_read(self._ssl) + + def want_write(self) -> bool: + """ + Checks if there is data to write to the transport layer to complete an + operation. 
+ + :return: True iff there is data to write + """ + return _lib.SSL_want_write(self._ssl) + + def set_accept_state(self) -> None: + """ + Set the connection to work in server mode. The handshake will be + handled automatically by read/write. + + :return: None + """ + _lib.SSL_set_accept_state(self._ssl) + + def set_connect_state(self) -> None: + """ + Set the connection to work in client mode. The handshake will be + handled automatically by read/write. + + :return: None + """ + _lib.SSL_set_connect_state(self._ssl) + + def get_session(self) -> Session | None: + """ + Returns the Session currently used. + + :return: An instance of :class:`OpenSSL.SSL.Session` or + :obj:`None` if no session exists. + + .. versionadded:: 0.14 + """ + session = _lib.SSL_get1_session(self._ssl) + if session == _ffi.NULL: + return None + + pysession = Session.__new__(Session) + pysession._session = _ffi.gc(session, _lib.SSL_SESSION_free) + return pysession + + def set_session(self, session: Session) -> None: + """ + Set the session to be used when the TLS/SSL connection is established. + + :param session: A Session instance representing the session to use. + :returns: None + + .. versionadded:: 0.14 + """ + if not isinstance(session, Session): + raise TypeError("session must be a Session instance") + + result = _lib.SSL_set_session(self._ssl, session._session) + _openssl_assert(result == 1) + + def _get_finished_message( + self, function: Callable[[Any, Any, int], int] + ) -> bytes | None: + """ + Helper to implement :meth:`get_finished` and + :meth:`get_peer_finished`. + + :param function: Either :data:`SSL_get_finished`: or + :data:`SSL_get_peer_finished`. + + :return: :data:`None` if the desired message has not yet been + received, otherwise the contents of the message. + """ + # The OpenSSL documentation says nothing about what might happen if the + # count argument given is zero. Specifically, it doesn't say whether + # the output buffer may be NULL in that case or not. Inspection of the + # implementation reveals that it calls memcpy() unconditionally. + # Section 7.1.4, paragraph 1 of the C standard suggests that + # memcpy(NULL, source, 0) is not guaranteed to produce defined (let + # alone desirable) behavior (though it probably does on just about + # every implementation...) + # + # Allocate a tiny buffer to pass in (instead of just passing NULL as + # one might expect) for the initial call so as to be safe against this + # potentially undefined behavior. + empty = _ffi.new("char[]", 0) + size = function(self._ssl, empty, 0) + if size == 0: + # No Finished message so far. + return None + + buf = _no_zero_allocator("char[]", size) + function(self._ssl, buf, size) + return _ffi.buffer(buf, size)[:] + + def get_finished(self) -> bytes | None: + """ + Obtain the latest TLS Finished message that we sent. + + :return: The contents of the message or :obj:`None` if the TLS + handshake has not yet completed. + + .. versionadded:: 0.15 + """ + return self._get_finished_message(_lib.SSL_get_finished) + + def get_peer_finished(self) -> bytes | None: + """ + Obtain the latest TLS Finished message that we received from the peer. + + :return: The contents of the message or :obj:`None` if the TLS + handshake has not yet completed. + + .. versionadded:: 0.15 + """ + return self._get_finished_message(_lib.SSL_get_peer_finished) + + def get_cipher_name(self) -> str | None: + """ + Obtain the name of the currently used cipher. 
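+
+        For example (a sketch, assuming an established connection
+        ``conn``)::
+
+            print(conn.get_cipher_name(), conn.get_cipher_bits(),
+                  conn.get_cipher_version())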
+ + :returns: The name of the currently used cipher or :obj:`None` + if no connection has been established. + + .. versionadded:: 0.15 + """ + cipher = _lib.SSL_get_current_cipher(self._ssl) + if cipher == _ffi.NULL: + return None + else: + name = _ffi.string(_lib.SSL_CIPHER_get_name(cipher)) + return name.decode("utf-8") + + def get_cipher_bits(self) -> int | None: + """ + Obtain the number of secret bits of the currently used cipher. + + :returns: The number of secret bits of the currently used cipher + or :obj:`None` if no connection has been established. + + .. versionadded:: 0.15 + """ + cipher = _lib.SSL_get_current_cipher(self._ssl) + if cipher == _ffi.NULL: + return None + else: + return _lib.SSL_CIPHER_get_bits(cipher, _ffi.NULL) + + def get_cipher_version(self) -> str | None: + """ + Obtain the protocol version of the currently used cipher. + + :returns: The protocol name of the currently used cipher + or :obj:`None` if no connection has been established. + + .. versionadded:: 0.15 + """ + cipher = _lib.SSL_get_current_cipher(self._ssl) + if cipher == _ffi.NULL: + return None + else: + version = _ffi.string(_lib.SSL_CIPHER_get_version(cipher)) + return version.decode("utf-8") + + def get_protocol_version_name(self) -> str: + """ + Retrieve the protocol version of the current connection. + + :returns: The TLS version of the current connection, for example + the value for TLS 1.2 would be ``TLSv1.2``or ``Unknown`` + for connections that were not successfully established. + """ + version = _ffi.string(_lib.SSL_get_version(self._ssl)) + return version.decode("utf-8") + + def get_protocol_version(self) -> int: + """ + Retrieve the SSL or TLS protocol version of the current connection. + + :returns: The TLS version of the current connection. For example, + it will return ``0x769`` for connections made over TLS version 1. + """ + version = _lib.SSL_version(self._ssl) + return version + + def set_alpn_protos(self, protos: list[bytes]) -> None: + """ + Specify the client's ALPN protocol list. + + These protocols are offered to the server during protocol negotiation. + + :param protos: A list of the protocols to be offered to the server. + This list should be a Python list of bytestrings representing the + protocols to offer, e.g. ``[b'http/1.1', b'spdy/2']``. + """ + # Different versions of OpenSSL are inconsistent about how they handle + # empty proto lists (see #1043), so we avoid the problem entirely by + # rejecting them ourselves. + if not protos: + raise ValueError("at least one protocol must be specified") + + # Take the list of protocols and join them together, prefixing them + # with their lengths. + protostr = b"".join( + chain.from_iterable((bytes((len(p),)), p) for p in protos) + ) + + # Build a C string from the list. We don't need to save this off + # because OpenSSL immediately copies the data out. + input_str = _ffi.new("unsigned char[]", protostr) + + # https://www.openssl.org/docs/man1.1.0/man3/SSL_CTX_set_alpn_protos.html: + # SSL_CTX_set_alpn_protos() and SSL_set_alpn_protos() + # return 0 on success, and non-0 on failure. + # WARNING: these functions reverse the return value convention. + _openssl_assert( + _lib.SSL_set_alpn_protos(self._ssl, input_str, len(protostr)) == 0 + ) + + def get_alpn_proto_negotiated(self) -> bytes: + """ + Get the protocol that was negotiated by ALPN. + + :returns: A bytestring of the protocol name. If no protocol has been + negotiated yet, returns an empty bytestring. 
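+
+        For example, a client-side sketch (assuming ``conn`` had its
+        protocol list set before the handshake)::
+
+            conn.set_alpn_protos([b"h2", b"http/1.1"])
+            conn.do_handshake()
+            if conn.get_alpn_proto_negotiated() == b"h2":
+                ...  # proceed with HTTP/2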
+ """ + data = _ffi.new("unsigned char **") + data_len = _ffi.new("unsigned int *") + + _lib.SSL_get0_alpn_selected(self._ssl, data, data_len) + + if not data_len: + return b"" + + return _ffi.buffer(data[0], data_len[0])[:] + + def get_selected_srtp_profile(self) -> bytes: + """ + Get the SRTP protocol which was negotiated. + + :returns: A bytestring of the SRTP profile name. If no profile has been + negotiated yet, returns an empty bytestring. + """ + profile = _lib.SSL_get_selected_srtp_profile(self._ssl) + if not profile: + return b"" + + return _ffi.string(profile.name) + + def request_ocsp(self) -> None: + """ + Called to request that the server sends stapled OCSP data, if + available. If this is not called on the client side then the server + will not send OCSP data. Should be used in conjunction with + :meth:`Context.set_ocsp_client_callback`. + """ + rc = _lib.SSL_set_tlsext_status_type( + self._ssl, _lib.TLSEXT_STATUSTYPE_ocsp + ) + _openssl_assert(rc == 1) + + def set_info_callback( + self, callback: Callable[[Connection, int, int], None] + ) -> None: + """ + Set the information callback to *callback*. This function will be + called from time to time during SSL handshakes. + + :param callback: The Python callback to use. This should take three + arguments: a Connection object and two integers. The first integer + specifies where in the SSL handshake the function was called, and + the other the return code from a (possibly failed) internal + function call. + :return: None + """ + + @wraps(callback) + def wrapper(ssl, where, return_code): # type: ignore[no-untyped-def] + callback(Connection._reverse_mapping[ssl], where, return_code) + + self._info_callback = _ffi.callback( + "void (*)(const SSL *, int, int)", wrapper + ) + _lib.SSL_set_info_callback(self._ssl, self._info_callback) diff --git a/venv/lib/python3.12/site-packages/OpenSSL/__init__.py b/venv/lib/python3.12/site-packages/OpenSSL/__init__.py new file mode 100644 index 00000000..7b077cf7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/OpenSSL/__init__.py @@ -0,0 +1,31 @@ +# Copyright (C) AB Strakt +# See LICENSE for details. 
+ +""" +pyOpenSSL - A simple wrapper around the OpenSSL library +""" + +from OpenSSL import SSL, crypto +from OpenSSL.version import ( + __author__, + __copyright__, + __email__, + __license__, + __summary__, + __title__, + __uri__, + __version__, +) + +__all__ = [ + "SSL", + "__author__", + "__copyright__", + "__email__", + "__license__", + "__summary__", + "__title__", + "__uri__", + "__version__", + "crypto", +] diff --git a/venv/lib/python3.12/site-packages/OpenSSL/__pycache__/SSL.cpython-312.pyc b/venv/lib/python3.12/site-packages/OpenSSL/__pycache__/SSL.cpython-312.pyc new file mode 100644 index 00000000..434547a0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/OpenSSL/__pycache__/SSL.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/OpenSSL/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/OpenSSL/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..2fbd8e22 Binary files /dev/null and b/venv/lib/python3.12/site-packages/OpenSSL/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/OpenSSL/__pycache__/_util.cpython-312.pyc b/venv/lib/python3.12/site-packages/OpenSSL/__pycache__/_util.cpython-312.pyc new file mode 100644 index 00000000..577a1eee Binary files /dev/null and b/venv/lib/python3.12/site-packages/OpenSSL/__pycache__/_util.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/OpenSSL/__pycache__/crypto.cpython-312.pyc b/venv/lib/python3.12/site-packages/OpenSSL/__pycache__/crypto.cpython-312.pyc new file mode 100644 index 00000000..36556429 Binary files /dev/null and b/venv/lib/python3.12/site-packages/OpenSSL/__pycache__/crypto.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/OpenSSL/__pycache__/debug.cpython-312.pyc b/venv/lib/python3.12/site-packages/OpenSSL/__pycache__/debug.cpython-312.pyc new file mode 100644 index 00000000..1839eda3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/OpenSSL/__pycache__/debug.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/OpenSSL/__pycache__/rand.cpython-312.pyc b/venv/lib/python3.12/site-packages/OpenSSL/__pycache__/rand.cpython-312.pyc new file mode 100644 index 00000000..92d8c59f Binary files /dev/null and b/venv/lib/python3.12/site-packages/OpenSSL/__pycache__/rand.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/OpenSSL/__pycache__/version.cpython-312.pyc b/venv/lib/python3.12/site-packages/OpenSSL/__pycache__/version.cpython-312.pyc new file mode 100644 index 00000000..ac87e906 Binary files /dev/null and b/venv/lib/python3.12/site-packages/OpenSSL/__pycache__/version.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/OpenSSL/_util.py b/venv/lib/python3.12/site-packages/OpenSSL/_util.py new file mode 100644 index 00000000..41a64526 --- /dev/null +++ b/venv/lib/python3.12/site-packages/OpenSSL/_util.py @@ -0,0 +1,129 @@ +from __future__ import annotations + +import os +import sys +import warnings +from typing import Any, Callable, NoReturn, Union + +from cryptography.hazmat.bindings.openssl.binding import Binding + +if sys.version_info >= (3, 9): + StrOrBytesPath = Union[str, bytes, os.PathLike[str], os.PathLike[bytes]] +else: + StrOrBytesPath = Union[str, bytes, os.PathLike] + +binding = Binding() +ffi = binding.ffi +lib: Any = binding.lib + + +# This is a special CFFI allocator that does not bother to zero its memory +# after allocation. 
This has vastly better performance on large allocations and +# so should be used whenever we don't need the memory zeroed out. +no_zero_allocator = ffi.new_allocator(should_clear_after_alloc=False) + + +def text(charp: Any) -> str: + """ + Get a native string type representing of the given CFFI ``char*`` object. + + :param charp: A C-style string represented using CFFI. + + :return: :class:`str` + """ + if not charp: + return "" + return ffi.string(charp).decode("utf-8") + + +def exception_from_error_queue(exception_type: type[Exception]) -> NoReturn: + """ + Convert an OpenSSL library failure into a Python exception. + + When a call to the native OpenSSL library fails, this is usually signalled + by the return value, and an error code is stored in an error queue + associated with the current thread. The err library provides functions to + obtain these error codes and textual error messages. + """ + errors = [] + + while True: + error = lib.ERR_get_error() + if error == 0: + break + errors.append( + ( + text(lib.ERR_lib_error_string(error)), + text(lib.ERR_func_error_string(error)), + text(lib.ERR_reason_error_string(error)), + ) + ) + + raise exception_type(errors) + + +def make_assert(error: type[Exception]) -> Callable[[bool], Any]: + """ + Create an assert function that uses :func:`exception_from_error_queue` to + raise an exception wrapped by *error*. + """ + + def openssl_assert(ok: bool) -> None: + """ + If *ok* is not True, retrieve the error from OpenSSL and raise it. + """ + if ok is not True: + exception_from_error_queue(error) + + return openssl_assert + + +def path_bytes(s: StrOrBytesPath) -> bytes: + """ + Convert a Python path to a :py:class:`bytes` for the path which can be + passed into an OpenSSL API accepting a filename. + + :param s: A path (valid for os.fspath). + + :return: An instance of :py:class:`bytes`. + """ + b = os.fspath(s) + + if isinstance(b, str): + return b.encode(sys.getfilesystemencoding()) + else: + return b + + +def byte_string(s: str) -> bytes: + return s.encode("charmap") + + +# A marker object to observe whether some optional arguments are passed any +# value or not. +UNSPECIFIED = object() + +_TEXT_WARNING = "str for {0} is no longer accepted, use bytes" + + +def text_to_bytes_and_warn(label: str, obj: Any) -> Any: + """ + If ``obj`` is text, emit a warning that it should be bytes instead and try + to convert it to bytes automatically. + + :param str label: The name of the parameter from which ``obj`` was taken + (so a developer can easily find the source of the problem and correct + it). + + :return: If ``obj`` is the text string type, a ``bytes`` object giving the + UTF-8 encoding of that text is returned. Otherwise, ``obj`` itself is + returned. 
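+
+    For example, passing the text string ``"data"`` where a ``buf``
+    argument is expected emits a :class:`DeprecationWarning` and the
+    UTF-8 bytes ``b"data"`` are used in its place.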
+ """ + if isinstance(obj, str): + warnings.warn( + _TEXT_WARNING.format(label), + category=DeprecationWarning, + stacklevel=3, + ) + return obj.encode("utf-8") + return obj diff --git a/venv/lib/python3.12/site-packages/OpenSSL/crypto.py b/venv/lib/python3.12/site-packages/OpenSSL/crypto.py new file mode 100644 index 00000000..366007e8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/OpenSSL/crypto.py @@ -0,0 +1,2450 @@ +from __future__ import annotations + +import calendar +import datetime +import functools +import sys +import typing +import warnings +from base64 import b16encode +from collections.abc import Iterable, Sequence +from functools import partial +from typing import ( + Any, + Callable, + Union, +) + +if sys.version_info >= (3, 13): + from warnings import deprecated +elif sys.version_info < (3, 8): + _T = typing.TypeVar("T") + + def deprecated(msg: str, **kwargs: object) -> Callable[[_T], _T]: + return lambda f: f +else: + from typing_extensions import deprecated + +from cryptography import utils, x509 +from cryptography.hazmat.primitives.asymmetric import ( + dsa, + ec, + ed448, + ed25519, + rsa, +) + +from OpenSSL._util import StrOrBytesPath +from OpenSSL._util import ( + byte_string as _byte_string, +) +from OpenSSL._util import ( + exception_from_error_queue as _exception_from_error_queue, +) +from OpenSSL._util import ( + ffi as _ffi, +) +from OpenSSL._util import ( + lib as _lib, +) +from OpenSSL._util import ( + make_assert as _make_assert, +) +from OpenSSL._util import ( + path_bytes as _path_bytes, +) + +__all__ = [ + "FILETYPE_ASN1", + "FILETYPE_PEM", + "FILETYPE_TEXT", + "TYPE_DSA", + "TYPE_RSA", + "X509", + "Error", + "PKey", + "X509Extension", + "X509Name", + "X509Req", + "X509Store", + "X509StoreContext", + "X509StoreContextError", + "X509StoreFlags", + "dump_certificate", + "dump_certificate_request", + "dump_privatekey", + "dump_publickey", + "get_elliptic_curve", + "get_elliptic_curves", + "load_certificate", + "load_certificate_request", + "load_privatekey", + "load_publickey", +] + + +_PrivateKey = Union[ + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, + rsa.RSAPrivateKey, +] +_PublicKey = Union[ + dsa.DSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, + rsa.RSAPublicKey, +] +_Key = Union[_PrivateKey, _PublicKey] +PassphraseCallableT = Union[bytes, Callable[..., bytes]] + + +FILETYPE_PEM: int = _lib.SSL_FILETYPE_PEM +FILETYPE_ASN1: int = _lib.SSL_FILETYPE_ASN1 + +# TODO This was an API mistake. OpenSSL has no such constant. +FILETYPE_TEXT = 2**16 - 1 + +TYPE_RSA: int = _lib.EVP_PKEY_RSA +TYPE_DSA: int = _lib.EVP_PKEY_DSA +TYPE_DH: int = _lib.EVP_PKEY_DH +TYPE_EC: int = _lib.EVP_PKEY_EC + + +class Error(Exception): + """ + An error occurred in an `OpenSSL.crypto` API. + """ + + +_raise_current_error = partial(_exception_from_error_queue, Error) +_openssl_assert = _make_assert(Error) + + +def _new_mem_buf(buffer: bytes | None = None) -> Any: + """ + Allocate a new OpenSSL memory BIO. + + Arrange for the garbage collector to clean it up automatically. + + :param buffer: None or some bytes to use to put into the BIO so that they + can be read out. + """ + if buffer is None: + bio = _lib.BIO_new(_lib.BIO_s_mem()) + free = _lib.BIO_free + else: + data = _ffi.new("char[]", buffer) + bio = _lib.BIO_new_mem_buf(data, len(buffer)) + + # Keep the memory alive as long as the bio is alive! 
+ def free(bio: Any, ref: Any = data) -> Any: + return _lib.BIO_free(bio) + + _openssl_assert(bio != _ffi.NULL) + + bio = _ffi.gc(bio, free) + return bio + + +def _bio_to_string(bio: Any) -> bytes: + """ + Copy the contents of an OpenSSL BIO object into a Python byte string. + """ + result_buffer = _ffi.new("char**") + buffer_length = _lib.BIO_get_mem_data(bio, result_buffer) + return _ffi.buffer(result_buffer[0], buffer_length)[:] + + +def _set_asn1_time(boundary: Any, when: bytes) -> None: + """ + The the time value of an ASN1 time object. + + @param boundary: An ASN1_TIME pointer (or an object safely + castable to that type) which will have its value set. + @param when: A string representation of the desired time value. + + @raise TypeError: If C{when} is not a L{bytes} string. + @raise ValueError: If C{when} does not represent a time in the required + format. + @raise RuntimeError: If the time value cannot be set for some other + (unspecified) reason. + """ + if not isinstance(when, bytes): + raise TypeError("when must be a byte string") + # ASN1_TIME_set_string validates the string without writing anything + # when the destination is NULL. + _openssl_assert(boundary != _ffi.NULL) + + set_result = _lib.ASN1_TIME_set_string(boundary, when) + if set_result == 0: + raise ValueError("Invalid string") + + +def _new_asn1_time(when: bytes) -> Any: + """ + Behaves like _set_asn1_time but returns a new ASN1_TIME object. + + @param when: A string representation of the desired time value. + + @raise TypeError: If C{when} is not a L{bytes} string. + @raise ValueError: If C{when} does not represent a time in the required + format. + @raise RuntimeError: If the time value cannot be set for some other + (unspecified) reason. + """ + ret = _lib.ASN1_TIME_new() + _openssl_assert(ret != _ffi.NULL) + ret = _ffi.gc(ret, _lib.ASN1_TIME_free) + _set_asn1_time(ret, when) + return ret + + +def _get_asn1_time(timestamp: Any) -> bytes | None: + """ + Retrieve the time value of an ASN1 time object. + + @param timestamp: An ASN1_GENERALIZEDTIME* (or an object safely castable to + that type) from which the time value will be retrieved. + + @return: The time value from C{timestamp} as a L{bytes} string in a certain + format. Or C{None} if the object contains no time value. + """ + string_timestamp = _ffi.cast("ASN1_STRING*", timestamp) + if _lib.ASN1_STRING_length(string_timestamp) == 0: + return None + elif ( + _lib.ASN1_STRING_type(string_timestamp) == _lib.V_ASN1_GENERALIZEDTIME + ): + return _ffi.string(_lib.ASN1_STRING_get0_data(string_timestamp)) + else: + generalized_timestamp = _ffi.new("ASN1_GENERALIZEDTIME**") + _lib.ASN1_TIME_to_generalizedtime(timestamp, generalized_timestamp) + _openssl_assert(generalized_timestamp[0] != _ffi.NULL) + + string_timestamp = _ffi.cast("ASN1_STRING*", generalized_timestamp[0]) + string_data = _lib.ASN1_STRING_get0_data(string_timestamp) + string_result = _ffi.string(string_data) + _lib.ASN1_GENERALIZEDTIME_free(generalized_timestamp[0]) + return string_result + + +class _X509NameInvalidator: + def __init__(self) -> None: + self._names: list[X509Name] = [] + + def add(self, name: X509Name) -> None: + self._names.append(name) + + def clear(self) -> None: + for name in self._names: + # Breaks the object, but also prevents UAF! + del name._name + + +class PKey: + """ + A class representing an DSA or RSA public key or key pair. 
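+
+    For example, generating a fresh 2048-bit RSA key pair (a sketch;
+    new code should generally prefer the ``cryptography`` package
+    directly)::
+
+        pkey = PKey()
+        pkey.generate_key(TYPE_RSA, 2048)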
+ """ + + _only_public = False + _initialized = True + + def __init__(self) -> None: + pkey = _lib.EVP_PKEY_new() + self._pkey = _ffi.gc(pkey, _lib.EVP_PKEY_free) + self._initialized = False + + def to_cryptography_key(self) -> _Key: + """ + Export as a ``cryptography`` key. + + :rtype: One of ``cryptography``'s `key interfaces`_. + + .. _key interfaces: https://cryptography.io/en/latest/hazmat/\ + primitives/asymmetric/rsa/#key-interfaces + + .. versionadded:: 16.1.0 + """ + from cryptography.hazmat.primitives.serialization import ( + load_der_private_key, + load_der_public_key, + ) + + if self._only_public: + der = dump_publickey(FILETYPE_ASN1, self) + return typing.cast(_Key, load_der_public_key(der)) + else: + der = dump_privatekey(FILETYPE_ASN1, self) + return typing.cast(_Key, load_der_private_key(der, password=None)) + + @classmethod + def from_cryptography_key(cls, crypto_key: _Key) -> PKey: + """ + Construct based on a ``cryptography`` *crypto_key*. + + :param crypto_key: A ``cryptography`` key. + :type crypto_key: One of ``cryptography``'s `key interfaces`_. + + :rtype: PKey + + .. versionadded:: 16.1.0 + """ + if not isinstance( + crypto_key, + ( + dsa.DSAPrivateKey, + dsa.DSAPublicKey, + ec.EllipticCurvePrivateKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PrivateKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PrivateKey, + ed448.Ed448PublicKey, + rsa.RSAPrivateKey, + rsa.RSAPublicKey, + ), + ): + raise TypeError("Unsupported key type") + + from cryptography.hazmat.primitives.serialization import ( + Encoding, + NoEncryption, + PrivateFormat, + PublicFormat, + ) + + if isinstance( + crypto_key, + ( + dsa.DSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, + rsa.RSAPublicKey, + ), + ): + return load_publickey( + FILETYPE_ASN1, + crypto_key.public_bytes( + Encoding.DER, PublicFormat.SubjectPublicKeyInfo + ), + ) + else: + der = crypto_key.private_bytes( + Encoding.DER, PrivateFormat.PKCS8, NoEncryption() + ) + return load_privatekey(FILETYPE_ASN1, der) + + def generate_key(self, type: int, bits: int) -> None: + """ + Generate a key pair of the given type, with the given number of bits. + + This generates a key "into" the this object. + + :param type: The key type. + :type type: :py:data:`TYPE_RSA` or :py:data:`TYPE_DSA` + :param bits: The number of bits. + :type bits: :py:data:`int` ``>= 0`` + :raises TypeError: If :py:data:`type` or :py:data:`bits` isn't + of the appropriate type. + :raises ValueError: If the number of bits isn't an integer of + the appropriate size. 
+ :return: ``None`` + """ + if not isinstance(type, int): + raise TypeError("type must be an integer") + + if not isinstance(bits, int): + raise TypeError("bits must be an integer") + + if type == TYPE_RSA: + if bits <= 0: + raise ValueError("Invalid number of bits") + + # TODO Check error return + exponent = _lib.BN_new() + exponent = _ffi.gc(exponent, _lib.BN_free) + _lib.BN_set_word(exponent, _lib.RSA_F4) + + rsa = _lib.RSA_new() + + result = _lib.RSA_generate_key_ex(rsa, bits, exponent, _ffi.NULL) + _openssl_assert(result == 1) + + result = _lib.EVP_PKEY_assign_RSA(self._pkey, rsa) + _openssl_assert(result == 1) + + elif type == TYPE_DSA: + dsa = _lib.DSA_new() + _openssl_assert(dsa != _ffi.NULL) + + dsa = _ffi.gc(dsa, _lib.DSA_free) + res = _lib.DSA_generate_parameters_ex( + dsa, bits, _ffi.NULL, 0, _ffi.NULL, _ffi.NULL, _ffi.NULL + ) + _openssl_assert(res == 1) + + _openssl_assert(_lib.DSA_generate_key(dsa) == 1) + _openssl_assert(_lib.EVP_PKEY_set1_DSA(self._pkey, dsa) == 1) + else: + raise Error("No such key type") + + self._initialized = True + + def check(self) -> bool: + """ + Check the consistency of an RSA private key. + + This is the Python equivalent of OpenSSL's ``RSA_check_key``. + + :return: ``True`` if key is consistent. + + :raise OpenSSL.crypto.Error: if the key is inconsistent. + + :raise TypeError: if the key is of a type which cannot be checked. + Only RSA keys can currently be checked. + """ + if self._only_public: + raise TypeError("public key only") + + if _lib.EVP_PKEY_type(self.type()) != _lib.EVP_PKEY_RSA: + raise TypeError("Only RSA keys can currently be checked.") + + rsa = _lib.EVP_PKEY_get1_RSA(self._pkey) + rsa = _ffi.gc(rsa, _lib.RSA_free) + result = _lib.RSA_check_key(rsa) + if result == 1: + return True + _raise_current_error() + + def type(self) -> int: + """ + Returns the type of the key + + :return: The type of the key. + """ + return _lib.EVP_PKEY_id(self._pkey) + + def bits(self) -> int: + """ + Returns the number of bits of the key + + :return: The number of bits of the key. + """ + return _lib.EVP_PKEY_bits(self._pkey) + + +class _EllipticCurve: + """ + A representation of a supported elliptic curve. + + @cvar _curves: :py:obj:`None` until an attempt is made to load the curves. + Thereafter, a :py:type:`set` containing :py:type:`_EllipticCurve` + instances each of which represents one curve supported by the system. + @type _curves: :py:type:`NoneType` or :py:type:`set` + """ + + _curves = None + + def __ne__(self, other: Any) -> bool: + """ + Implement cooperation with the right-hand side argument of ``!=``. + + Python 3 seems to have dropped this cooperation in this very narrow + circumstance. + """ + if isinstance(other, _EllipticCurve): + return super().__ne__(other) + return NotImplemented + + @classmethod + def _load_elliptic_curves(cls, lib: Any) -> set[_EllipticCurve]: + """ + Get the curves supported by OpenSSL. + + :param lib: The OpenSSL library binding object. + + :return: A :py:type:`set` of ``cls`` instances giving the names of the + elliptic curves the underlying library supports. + """ + num_curves = lib.EC_get_builtin_curves(_ffi.NULL, 0) + builtin_curves = _ffi.new("EC_builtin_curve[]", num_curves) + # The return value on this call should be num_curves again. We + # could check it to make sure but if it *isn't* then.. what could + # we do? Abort the whole process, I suppose...? 
-exarkun
+ lib.EC_get_builtin_curves(builtin_curves, num_curves)
+ return set(cls.from_nid(lib, c.nid) for c in builtin_curves)
+
+ @classmethod
+ def _get_elliptic_curves(cls, lib: Any) -> set[_EllipticCurve]:
+ """
+ Get, cache, and return the curves supported by OpenSSL.
+
+ :param lib: The OpenSSL library binding object.
+
+ :return: A :py:type:`set` of ``cls`` instances giving the names of the
+ elliptic curves the underlying library supports.
+ """
+ if cls._curves is None:
+ cls._curves = cls._load_elliptic_curves(lib)
+ return cls._curves
+
+ @classmethod
+ def from_nid(cls, lib: Any, nid: int) -> _EllipticCurve:
+ """
+ Instantiate a new :py:class:`_EllipticCurve` associated with the given
+ OpenSSL NID.
+
+ :param lib: The OpenSSL library binding object.
+
+ :param nid: The OpenSSL NID the resulting curve object will represent.
+ This must be a curve NID (and not, for example, a hash NID) or
+ subsequent operations will fail in unpredictable ways.
+ :type nid: :py:class:`int`
+
+ :return: The curve object.
+ """
+ return cls(lib, nid, _ffi.string(lib.OBJ_nid2sn(nid)).decode("ascii"))
+
+ def __init__(self, lib: Any, nid: int, name: str) -> None:
+ """
+ :param _lib: The :py:mod:`cryptography` binding instance used to
+ interface with OpenSSL.
+
+ :param _nid: The OpenSSL NID identifying the curve this object
+ represents.
+ :type _nid: :py:class:`int`
+
+ :param name: The OpenSSL short name identifying the curve this object
+ represents.
+ :type name: :py:class:`unicode`
+ """
+ self._lib = lib
+ self._nid = nid
+ self.name = name
+
+ def __repr__(self) -> str:
+ return f"<Curve {self.name!r}>"
+
+ def _to_EC_KEY(self) -> Any:
+ """
+ Create a new OpenSSL EC_KEY structure initialized to use this curve.
+
+ The structure is automatically garbage collected when the Python object
+ is garbage collected.
+ """
+ key = self._lib.EC_KEY_new_by_curve_name(self._nid)
+ return _ffi.gc(key, _lib.EC_KEY_free)
+
+
+@deprecated(
+ "get_elliptic_curves is deprecated. You should use the APIs in "
+ "cryptography instead."
+)
+def get_elliptic_curves() -> set[_EllipticCurve]:
+ """
+ Return a set of objects representing the elliptic curves supported in the
+ OpenSSL build in use.
+
+ The curve objects have a :py:class:`unicode` ``name`` attribute by which
+ they identify themselves.
+
+ The curve objects are useful as values for the argument accepted by
+ :py:meth:`Context.set_tmp_ecdh` to specify which elliptical curve should be
+ used for ECDHE key exchange.
+ """
+ return _EllipticCurve._get_elliptic_curves(_lib)
+
+
+@deprecated(
+ "get_elliptic_curve is deprecated. You should use the APIs in "
+ "cryptography instead."
+)
+def get_elliptic_curve(name: str) -> _EllipticCurve:
+ """
+ Return a single curve object selected by name.
+
+ See :py:func:`get_elliptic_curves` for information about curve objects.
+
+ :param name: The OpenSSL short name identifying the curve object to
+ retrieve.
+ :type name: :py:class:`unicode`
+
+ If the named curve is not supported then :py:class:`ValueError` is raised.
+ """
+ for curve in get_elliptic_curves():
+ if curve.name == name:
+ return curve
+ raise ValueError("unknown curve name", name)
+
+
+@functools.total_ordering
+class X509Name:
+ """
+ An X.509 Distinguished Name.
+
+ :ivar countryName: The country of the entity.
+ :ivar C: Alias for :py:attr:`countryName`.
+
+ :ivar stateOrProvinceName: The state or province of the entity.
+ :ivar ST: Alias for :py:attr:`stateOrProvinceName`.
+
+ :ivar localityName: The locality of the entity.
+ :ivar L: Alias for :py:attr:`localityName`. + + :ivar organizationName: The organization name of the entity. + :ivar O: Alias for :py:attr:`organizationName`. + + :ivar organizationalUnitName: The organizational unit of the entity. + :ivar OU: Alias for :py:attr:`organizationalUnitName` + + :ivar commonName: The common name of the entity. + :ivar CN: Alias for :py:attr:`commonName`. + + :ivar emailAddress: The e-mail address of the entity. + """ + + def __init__(self, name: X509Name) -> None: + """ + Create a new X509Name, copying the given X509Name instance. + + :param name: The name to copy. + :type name: :py:class:`X509Name` + """ + name = _lib.X509_NAME_dup(name._name) + self._name: Any = _ffi.gc(name, _lib.X509_NAME_free) + + def __setattr__(self, name: str, value: Any) -> None: + if name.startswith("_"): + return super().__setattr__(name, value) + + # Note: we really do not want str subclasses here, so we do not use + # isinstance. + if type(name) is not str: + raise TypeError( + f"attribute name must be string, not " + f"'{type(value).__name__:.200}'" + ) + + nid = _lib.OBJ_txt2nid(_byte_string(name)) + if nid == _lib.NID_undef: + try: + _raise_current_error() + except Error: + pass + raise AttributeError("No such attribute") + + # If there's an old entry for this NID, remove it + for i in range(_lib.X509_NAME_entry_count(self._name)): + ent = _lib.X509_NAME_get_entry(self._name, i) + ent_obj = _lib.X509_NAME_ENTRY_get_object(ent) + ent_nid = _lib.OBJ_obj2nid(ent_obj) + if nid == ent_nid: + ent = _lib.X509_NAME_delete_entry(self._name, i) + _lib.X509_NAME_ENTRY_free(ent) + break + + if isinstance(value, str): + value = value.encode("utf-8") + + add_result = _lib.X509_NAME_add_entry_by_NID( + self._name, nid, _lib.MBSTRING_UTF8, value, -1, -1, 0 + ) + if not add_result: + _raise_current_error() + + def __getattr__(self, name: str) -> str | None: + """ + Find attribute. An X509Name object has the following attributes: + countryName (alias C), stateOrProvince (alias ST), locality (alias L), + organization (alias O), organizationalUnit (alias OU), commonName + (alias CN) and more... + """ + nid = _lib.OBJ_txt2nid(_byte_string(name)) + if nid == _lib.NID_undef: + # This is a bit weird. OBJ_txt2nid indicated failure, but it seems + # a lower level function, a2d_ASN1_OBJECT, also feels the need to + # push something onto the error queue. If we don't clean that up + # now, someone else will bump into it later and be quite confused. + # See lp#314814. 
+ try:
+ _raise_current_error()
+ except Error:
+ pass
+ raise AttributeError("No such attribute")
+
+ entry_index = _lib.X509_NAME_get_index_by_NID(self._name, nid, -1)
+ if entry_index == -1:
+ return None
+
+ entry = _lib.X509_NAME_get_entry(self._name, entry_index)
+ data = _lib.X509_NAME_ENTRY_get_data(entry)
+
+ result_buffer = _ffi.new("unsigned char**")
+ data_length = _lib.ASN1_STRING_to_UTF8(result_buffer, data)
+ _openssl_assert(data_length >= 0)
+
+ try:
+ result = _ffi.buffer(result_buffer[0], data_length)[:].decode(
+ "utf-8"
+ )
+ finally:
+ # XXX untested
+ _lib.OPENSSL_free(result_buffer[0])
+ return result
+
+ def __eq__(self, other: Any) -> bool:
+ if not isinstance(other, X509Name):
+ return NotImplemented
+
+ return _lib.X509_NAME_cmp(self._name, other._name) == 0
+
+ def __lt__(self, other: Any) -> bool:
+ if not isinstance(other, X509Name):
+ return NotImplemented
+
+ return _lib.X509_NAME_cmp(self._name, other._name) < 0
+
+ def __repr__(self) -> str:
+ """
+ String representation of an X509Name
+ """
+ result_buffer = _ffi.new("char[]", 512)
+ format_result = _lib.X509_NAME_oneline(
+ self._name, result_buffer, len(result_buffer)
+ )
+ _openssl_assert(format_result != _ffi.NULL)
+
+ return "<X509Name object '{}'>".format(
+ _ffi.string(result_buffer).decode("utf-8"),
+ )
+
+ def hash(self) -> int:
+ """
+ Return an integer representation of the first four bytes of the
+ MD5 digest of the DER representation of the name.
+
+ This is the Python equivalent of OpenSSL's ``X509_NAME_hash``.
+
+ :return: The (integer) hash of this name.
+ :rtype: :py:class:`int`
+ """
+ return _lib.X509_NAME_hash(self._name)
+
+ def der(self) -> bytes:
+ """
+ Return the DER encoding of this name.
+
+ :return: The DER encoded form of this name.
+ :rtype: :py:class:`bytes`
+ """
+ result_buffer = _ffi.new("unsigned char**")
+ encode_result = _lib.i2d_X509_NAME(self._name, result_buffer)
+ _openssl_assert(encode_result >= 0)
+
+ string_result = _ffi.buffer(result_buffer[0], encode_result)[:]
+ _lib.OPENSSL_free(result_buffer[0])
+ return string_result
+
+ def get_components(self) -> list[tuple[bytes, bytes]]:
+ """
+ Returns the components of this name, as a sequence of 2-tuples.
+
+ :return: The components of this name.
+ :rtype: :py:class:`list` of ``name, value`` tuples.
+ """
+ result = []
+ for i in range(_lib.X509_NAME_entry_count(self._name)):
+ ent = _lib.X509_NAME_get_entry(self._name, i)
+
+ fname = _lib.X509_NAME_ENTRY_get_object(ent)
+ fval = _lib.X509_NAME_ENTRY_get_data(ent)
+
+ nid = _lib.OBJ_obj2nid(fname)
+ name = _lib.OBJ_nid2sn(nid)
+
+ # ffi.string does not handle strings containing NULL bytes
+ # (which may have been generated by old, broken software)
+ value = _ffi.buffer(
+ _lib.ASN1_STRING_get0_data(fval), _lib.ASN1_STRING_length(fval)
+ )[:]
+ result.append((_ffi.string(name), value))
+
+ return result
+
+
+@deprecated(
+ "X509Extension support in pyOpenSSL is deprecated. You should use the "
+ "APIs in cryptography."
+)
+class X509Extension:
+ """
+ An X.509 v3 certificate extension.
+
+ .. deprecated:: 23.3.0
+ Use cryptography's X509 APIs instead.
+ """
+
+ def __init__(
+ self,
+ type_name: bytes,
+ critical: bool,
+ value: bytes,
+ subject: X509 | None = None,
+ issuer: X509 | None = None,
+ ) -> None:
+ """
+ Initializes an X509 extension.
+
+ :param type_name: The name of the type of extension_ to create.
+ :type type_name: :py:data:`bytes`
+
+ :param bool critical: A flag indicating whether this is a critical
+ extension.
+ + :param value: The OpenSSL textual representation of the extension's + value. + :type value: :py:data:`bytes` + + :param subject: Optional X509 certificate to use as subject. + :type subject: :py:class:`X509` + + :param issuer: Optional X509 certificate to use as issuer. + :type issuer: :py:class:`X509` + + .. _extension: https://www.openssl.org/docs/manmaster/man5/ + x509v3_config.html#STANDARD-EXTENSIONS + """ + ctx = _ffi.new("X509V3_CTX*") + + # A context is necessary for any extension which uses the r2i + # conversion method. That is, X509V3_EXT_nconf may segfault if passed + # a NULL ctx. Start off by initializing most of the fields to NULL. + _lib.X509V3_set_ctx(ctx, _ffi.NULL, _ffi.NULL, _ffi.NULL, _ffi.NULL, 0) + + # We have no configuration database - but perhaps we should (some + # extensions may require it). + _lib.X509V3_set_ctx_nodb(ctx) + + # Initialize the subject and issuer, if appropriate. ctx is a local, + # and as far as I can tell none of the X509V3_* APIs invoked here steal + # any references, so no need to mess with reference counts or + # duplicates. + if issuer is not None: + if not isinstance(issuer, X509): + raise TypeError("issuer must be an X509 instance") + ctx.issuer_cert = issuer._x509 + if subject is not None: + if not isinstance(subject, X509): + raise TypeError("subject must be an X509 instance") + ctx.subject_cert = subject._x509 + + if critical: + # There are other OpenSSL APIs which would let us pass in critical + # separately, but they're harder to use, and since value is already + # a pile of crappy junk smuggling a ton of utterly important + # structured data, what's the point of trying to avoid nasty stuff + # with strings? (However, X509V3_EXT_i2d in particular seems like + # it would be a better API to invoke. I do not know where to get + # the ext_struc it desires for its last parameter, though.) + value = b"critical," + value + + extension = _lib.X509V3_EXT_nconf(_ffi.NULL, ctx, type_name, value) + if extension == _ffi.NULL: + _raise_current_error() + self._extension = _ffi.gc(extension, _lib.X509_EXTENSION_free) + + @property + def _nid(self) -> Any: + return _lib.OBJ_obj2nid( + _lib.X509_EXTENSION_get_object(self._extension) + ) + + _prefixes: typing.ClassVar[dict[int, str]] = { + _lib.GEN_EMAIL: "email", + _lib.GEN_DNS: "DNS", + _lib.GEN_URI: "URI", + } + + def _subjectAltNameString(self) -> str: + names = _ffi.cast( + "GENERAL_NAMES*", _lib.X509V3_EXT_d2i(self._extension) + ) + + names = _ffi.gc(names, _lib.GENERAL_NAMES_free) + parts = [] + for i in range(_lib.sk_GENERAL_NAME_num(names)): + name = _lib.sk_GENERAL_NAME_value(names, i) + try: + label = self._prefixes[name.type] + except KeyError: + bio = _new_mem_buf() + _lib.GENERAL_NAME_print(bio, name) + parts.append(_bio_to_string(bio).decode("utf-8")) + else: + value = _ffi.buffer(name.d.ia5.data, name.d.ia5.length)[ + : + ].decode("utf-8") + parts.append(label + ":" + value) + return ", ".join(parts) + + def __str__(self) -> str: + """ + :return: a nice text representation of the extension + """ + if _lib.NID_subject_alt_name == self._nid: + return self._subjectAltNameString() + + bio = _new_mem_buf() + print_result = _lib.X509V3_EXT_print(bio, self._extension, 0, 0) + _openssl_assert(print_result != 0) + + return _bio_to_string(bio).decode("utf-8") + + def get_critical(self) -> bool: + """ + Returns the critical field of this X.509 extension. + + :return: The critical field. 
+ """
+ return _lib.X509_EXTENSION_get_critical(self._extension)
+
+ def get_short_name(self) -> bytes:
+ """
+ Returns the short type name of this X.509 extension.
+
+ The result is a byte string such as :py:const:`b"basicConstraints"`.
+
+ :return: The short type name.
+ :rtype: :py:data:`bytes`
+
+ .. versionadded:: 0.12
+ """
+ obj = _lib.X509_EXTENSION_get_object(self._extension)
+ nid = _lib.OBJ_obj2nid(obj)
+ # OpenSSL 3.1.0 has a bug where nid2sn returns NULL for NIDs that
+ # previously returned UNDEF. This is a workaround for that issue.
+ # https://github.com/openssl/openssl/commit/908ba3ed9adbb3df90f76
+ buf = _lib.OBJ_nid2sn(nid)
+ if buf != _ffi.NULL:
+ return _ffi.string(buf)
+ else:
+ return b"UNDEF"
+
+ def get_data(self) -> bytes:
+ """
+ Returns the data of the X509 extension, encoded as ASN.1.
+
+ :return: The ASN.1 encoded data of this X509 extension.
+ :rtype: :py:data:`bytes`
+
+ .. versionadded:: 0.12
+ """
+ octet_result = _lib.X509_EXTENSION_get_data(self._extension)
+ string_result = _ffi.cast("ASN1_STRING*", octet_result)
+ char_result = _lib.ASN1_STRING_get0_data(string_result)
+ result_length = _lib.ASN1_STRING_length(string_result)
+ return _ffi.buffer(char_result, result_length)[:]
+
+
+@deprecated(
+ "CSR support in pyOpenSSL is deprecated. You should use the APIs "
+ "in cryptography."
+)
+class X509Req:
+ """
+ An X.509 certificate signing request.
+
+ .. deprecated:: 24.2.0
+ Use `cryptography.x509.CertificateSigningRequest` instead.
+ """
+
+ def __init__(self) -> None:
+ req = _lib.X509_REQ_new()
+ self._req = _ffi.gc(req, _lib.X509_REQ_free)
+ # Default to version 0.
+ self.set_version(0)
+
+ def to_cryptography(self) -> x509.CertificateSigningRequest:
+ """
+ Export as a ``cryptography`` certificate signing request.
+
+ :rtype: ``cryptography.x509.CertificateSigningRequest``
+
+ .. versionadded:: 17.1.0
+ """
+ from cryptography.x509 import load_der_x509_csr
+
+ der = _dump_certificate_request_internal(FILETYPE_ASN1, self)
+
+ return load_der_x509_csr(der)
+
+ @classmethod
+ def from_cryptography(
+ cls, crypto_req: x509.CertificateSigningRequest
+ ) -> X509Req:
+ """
+ Construct based on a ``cryptography`` *crypto_req*.
+
+ :param crypto_req: A ``cryptography`` X.509 certificate signing request
+ :type crypto_req: ``cryptography.x509.CertificateSigningRequest``
+
+ :rtype: X509Req
+
+ .. versionadded:: 17.1.0
+ """
+ if not isinstance(crypto_req, x509.CertificateSigningRequest):
+ raise TypeError("Must be a certificate signing request")
+
+ from cryptography.hazmat.primitives.serialization import Encoding
+
+ der = crypto_req.public_bytes(Encoding.DER)
+ return _load_certificate_request_internal(FILETYPE_ASN1, der)
+
+ def set_pubkey(self, pkey: PKey) -> None:
+ """
+ Set the public key of the certificate signing request.
+
+ :param pkey: The public key to use.
+ :type pkey: :py:class:`PKey`
+
+ :return: ``None``
+ """
+ set_result = _lib.X509_REQ_set_pubkey(self._req, pkey._pkey)
+ _openssl_assert(set_result == 1)
+
+ def get_pubkey(self) -> PKey:
+ """
+ Get the public key of the certificate signing request.
+
+ :return: The public key.
+ :rtype: :py:class:`PKey`
+ """
+ pkey = PKey.__new__(PKey)
+ pkey._pkey = _lib.X509_REQ_get_pubkey(self._req)
+ _openssl_assert(pkey._pkey != _ffi.NULL)
+ pkey._pkey = _ffi.gc(pkey._pkey, _lib.EVP_PKEY_free)
+ pkey._only_public = True
+ return pkey
+
+ def set_version(self, version: int) -> None:
+ """
+ Set the version subfield (RFC 2986, section 4.1) of the certificate
+ request.
+ + :param int version: The version number. + :return: ``None`` + """ + if not isinstance(version, int): + raise TypeError("version must be an int") + if version != 0: + raise ValueError( + "Invalid version. The only valid version for X509Req is 0." + ) + set_result = _lib.X509_REQ_set_version(self._req, version) + _openssl_assert(set_result == 1) + + def get_version(self) -> int: + """ + Get the version subfield (RFC 2459, section 4.1.2.1) of the certificate + request. + + :return: The value of the version subfield. + :rtype: :py:class:`int` + """ + return _lib.X509_REQ_get_version(self._req) + + def get_subject(self) -> X509Name: + """ + Return the subject of this certificate signing request. + + This creates a new :class:`X509Name` that wraps the underlying subject + name field on the certificate signing request. Modifying it will modify + the underlying signing request, and will have the effect of modifying + any other :class:`X509Name` that refers to this subject. + + :return: The subject of this certificate signing request. + :rtype: :class:`X509Name` + """ + name = X509Name.__new__(X509Name) + name._name = _lib.X509_REQ_get_subject_name(self._req) + _openssl_assert(name._name != _ffi.NULL) + + # The name is owned by the X509Req structure. As long as the X509Name + # Python object is alive, keep the X509Req Python object alive. + name._owner = self + + return name + + def add_extensions(self, extensions: Iterable[X509Extension]) -> None: + """ + Add extensions to the certificate signing request. + + :param extensions: The X.509 extensions to add. + :type extensions: iterable of :py:class:`X509Extension` + :return: ``None`` + """ + warnings.warn( + ( + "This API is deprecated and will be removed in a future " + "version of pyOpenSSL. You should use pyca/cryptography's " + "X.509 APIs instead." + ), + DeprecationWarning, + stacklevel=2, + ) + + stack = _lib.sk_X509_EXTENSION_new_null() + _openssl_assert(stack != _ffi.NULL) + + stack = _ffi.gc(stack, _lib.sk_X509_EXTENSION_free) + + for ext in extensions: + if not isinstance(ext, X509Extension): + raise ValueError("One of the elements is not an X509Extension") + + # TODO push can fail (here and elsewhere) + _lib.sk_X509_EXTENSION_push(stack, ext._extension) + + add_result = _lib.X509_REQ_add_extensions(self._req, stack) + _openssl_assert(add_result == 1) + + def get_extensions(self) -> list[X509Extension]: + """ + Get X.509 extensions in the certificate signing request. + + :return: The X.509 extensions in this request. + :rtype: :py:class:`list` of :py:class:`X509Extension` objects. + + .. versionadded:: 0.15 + """ + warnings.warn( + ( + "This API is deprecated and will be removed in a future " + "version of pyOpenSSL. You should use pyca/cryptography's " + "X.509 APIs instead." + ), + DeprecationWarning, + stacklevel=2, + ) + + exts = [] + native_exts_obj = _lib.X509_REQ_get_extensions(self._req) + native_exts_obj = _ffi.gc( + native_exts_obj, + lambda x: _lib.sk_X509_EXTENSION_pop_free( + x, + _ffi.addressof(_lib._original_lib, "X509_EXTENSION_free"), + ), + ) + + for i in range(_lib.sk_X509_EXTENSION_num(native_exts_obj)): + ext = X509Extension.__new__(X509Extension) + extension = _lib.X509_EXTENSION_dup( + _lib.sk_X509_EXTENSION_value(native_exts_obj, i) + ) + ext._extension = _ffi.gc(extension, _lib.X509_EXTENSION_free) + exts.append(ext) + return exts + + def sign(self, pkey: PKey, digest: str) -> None: + """ + Sign the certificate signing request with this key and digest type. + + :param pkey: The key pair to sign with. 
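
Taken together, the (deprecated) X509Req methods above support the classic CSR workflow; a sketch with a hypothetical common name, assuming the vendored module is importable:

    from OpenSSL.crypto import TYPE_RSA, PKey, X509Req

    key = PKey()
    key.generate_key(TYPE_RSA, 2048)

    req = X509Req()  # the class is flagged deprecated, so this warns
    req.get_subject().CN = "example.test"  # hypothetical subject CN
    req.set_pubkey(key)
    req.sign(key, "sha256")  # sign() continues just below
    assert req.verify(key)
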
+ :type pkey: :py:class:`PKey` + :param digest: The name of the message digest to use for the signature, + e.g. :py:data:`"sha256"`. + :type digest: :py:class:`str` + :return: ``None`` + """ + if pkey._only_public: + raise ValueError("Key has only public part") + + if not pkey._initialized: + raise ValueError("Key is uninitialized") + + digest_obj = _lib.EVP_get_digestbyname(_byte_string(digest)) + if digest_obj == _ffi.NULL: + raise ValueError("No such digest method") + + sign_result = _lib.X509_REQ_sign(self._req, pkey._pkey, digest_obj) + _openssl_assert(sign_result > 0) + + def verify(self, pkey: PKey) -> bool: + """ + Verifies the signature on this certificate signing request. + + :param PKey key: A public key. + + :return: ``True`` if the signature is correct. + :rtype: bool + + :raises OpenSSL.crypto.Error: If the signature is invalid or there is a + problem verifying the signature. + """ + if not isinstance(pkey, PKey): + raise TypeError("pkey must be a PKey instance") + + result = _lib.X509_REQ_verify(self._req, pkey._pkey) + if result <= 0: + _raise_current_error() + + return result + + +class X509: + """ + An X.509 certificate. + """ + + def __init__(self) -> None: + x509 = _lib.X509_new() + _openssl_assert(x509 != _ffi.NULL) + self._x509 = _ffi.gc(x509, _lib.X509_free) + + self._issuer_invalidator = _X509NameInvalidator() + self._subject_invalidator = _X509NameInvalidator() + + @classmethod + def _from_raw_x509_ptr(cls, x509: Any) -> X509: + cert = cls.__new__(cls) + cert._x509 = _ffi.gc(x509, _lib.X509_free) + cert._issuer_invalidator = _X509NameInvalidator() + cert._subject_invalidator = _X509NameInvalidator() + return cert + + def to_cryptography(self) -> x509.Certificate: + """ + Export as a ``cryptography`` certificate. + + :rtype: ``cryptography.x509.Certificate`` + + .. versionadded:: 17.1.0 + """ + from cryptography.x509 import load_der_x509_certificate + + der = dump_certificate(FILETYPE_ASN1, self) + return load_der_x509_certificate(der) + + @classmethod + def from_cryptography(cls, crypto_cert: x509.Certificate) -> X509: + """ + Construct based on a ``cryptography`` *crypto_cert*. + + :param crypto_key: A ``cryptography`` X.509 certificate. + :type crypto_key: ``cryptography.x509.Certificate`` + + :rtype: X509 + + .. versionadded:: 17.1.0 + """ + if not isinstance(crypto_cert, x509.Certificate): + raise TypeError("Must be a certificate") + + from cryptography.hazmat.primitives.serialization import Encoding + + der = crypto_cert.public_bytes(Encoding.DER) + return load_certificate(FILETYPE_ASN1, der) + + def set_version(self, version: int) -> None: + """ + Set the version number of the certificate. Note that the + version value is zero-based, eg. a value of 0 is V1. + + :param version: The version number of the certificate. + :type version: :py:class:`int` + + :return: ``None`` + """ + if not isinstance(version, int): + raise TypeError("version must be an integer") + + _openssl_assert(_lib.X509_set_version(self._x509, version) == 1) + + def get_version(self) -> int: + """ + Return the version number of the certificate. + + :return: The version number of the certificate. + :rtype: :py:class:`int` + """ + return _lib.X509_get_version(self._x509) + + def get_pubkey(self) -> PKey: + """ + Get the public key of the certificate. + + :return: The public key. 
+ :rtype: :py:class:`PKey` + """ + pkey = PKey.__new__(PKey) + pkey._pkey = _lib.X509_get_pubkey(self._x509) + if pkey._pkey == _ffi.NULL: + _raise_current_error() + pkey._pkey = _ffi.gc(pkey._pkey, _lib.EVP_PKEY_free) + pkey._only_public = True + return pkey + + def set_pubkey(self, pkey: PKey) -> None: + """ + Set the public key of the certificate. + + :param pkey: The public key. + :type pkey: :py:class:`PKey` + + :return: :py:data:`None` + """ + if not isinstance(pkey, PKey): + raise TypeError("pkey must be a PKey instance") + + set_result = _lib.X509_set_pubkey(self._x509, pkey._pkey) + _openssl_assert(set_result == 1) + + def sign(self, pkey: PKey, digest: str) -> None: + """ + Sign the certificate with this key and digest type. + + :param pkey: The key to sign with. + :type pkey: :py:class:`PKey` + + :param digest: The name of the message digest to use. + :type digest: :py:class:`str` + + :return: :py:data:`None` + """ + if not isinstance(pkey, PKey): + raise TypeError("pkey must be a PKey instance") + + if pkey._only_public: + raise ValueError("Key only has public part") + + if not pkey._initialized: + raise ValueError("Key is uninitialized") + + evp_md = _lib.EVP_get_digestbyname(_byte_string(digest)) + if evp_md == _ffi.NULL: + raise ValueError("No such digest method") + + sign_result = _lib.X509_sign(self._x509, pkey._pkey, evp_md) + _openssl_assert(sign_result > 0) + + def get_signature_algorithm(self) -> bytes: + """ + Return the signature algorithm used in the certificate. + + :return: The name of the algorithm. + :rtype: :py:class:`bytes` + + :raises ValueError: If the signature algorithm is undefined. + + .. versionadded:: 0.13 + """ + sig_alg = _lib.X509_get0_tbs_sigalg(self._x509) + alg = _ffi.new("ASN1_OBJECT **") + _lib.X509_ALGOR_get0(alg, _ffi.NULL, _ffi.NULL, sig_alg) + nid = _lib.OBJ_obj2nid(alg[0]) + if nid == _lib.NID_undef: + raise ValueError("Undefined signature algorithm") + return _ffi.string(_lib.OBJ_nid2ln(nid)) + + def digest(self, digest_name: str) -> bytes: + """ + Return the digest of the X509 object. + + :param digest_name: The name of the digest algorithm to use. + :type digest_name: :py:class:`str` + + :return: The digest of the object, formatted as + :py:const:`b":"`-delimited hex pairs. + :rtype: :py:class:`bytes` + """ + digest = _lib.EVP_get_digestbyname(_byte_string(digest_name)) + if digest == _ffi.NULL: + raise ValueError("No such digest method") + + result_buffer = _ffi.new("unsigned char[]", _lib.EVP_MAX_MD_SIZE) + result_length = _ffi.new("unsigned int[]", 1) + result_length[0] = len(result_buffer) + + digest_result = _lib.X509_digest( + self._x509, digest, result_buffer, result_length + ) + _openssl_assert(digest_result == 1) + + return b":".join( + [ + b16encode(ch).upper() + for ch in _ffi.buffer(result_buffer, result_length[0]) + ] + ) + + def subject_name_hash(self) -> int: + """ + Return the hash of the X509 subject. + + :return: The hash of the subject. + :rtype: :py:class:`int` + """ + return _lib.X509_subject_name_hash(self._x509) + + def set_serial_number(self, serial: int) -> None: + """ + Set the serial number of the certificate. + + :param serial: The new serial number. + :type serial: :py:class:`int` + + :return: :py:data`None` + """ + if not isinstance(serial, int): + raise TypeError("serial must be an integer") + + hex_serial = hex(serial)[2:] + hex_serial_bytes = hex_serial.encode("ascii") + + bignum_serial = _ffi.new("BIGNUM**") + + # BN_hex2bn stores the result in &bignum. 
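+ # The int -> hex -> BIGNUM -> ASN1_INTEGER round-trip below sidesteps any
+ # C integer width limit, so arbitrarily large Python serials are accepted.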
+ result = _lib.BN_hex2bn(bignum_serial, hex_serial_bytes) + _openssl_assert(result != _ffi.NULL) + + asn1_serial = _lib.BN_to_ASN1_INTEGER(bignum_serial[0], _ffi.NULL) + _lib.BN_free(bignum_serial[0]) + _openssl_assert(asn1_serial != _ffi.NULL) + asn1_serial = _ffi.gc(asn1_serial, _lib.ASN1_INTEGER_free) + set_result = _lib.X509_set_serialNumber(self._x509, asn1_serial) + _openssl_assert(set_result == 1) + + def get_serial_number(self) -> int: + """ + Return the serial number of this certificate. + + :return: The serial number. + :rtype: int + """ + asn1_serial = _lib.X509_get_serialNumber(self._x509) + bignum_serial = _lib.ASN1_INTEGER_to_BN(asn1_serial, _ffi.NULL) + try: + hex_serial = _lib.BN_bn2hex(bignum_serial) + try: + hexstring_serial = _ffi.string(hex_serial) + serial = int(hexstring_serial, 16) + return serial + finally: + _lib.OPENSSL_free(hex_serial) + finally: + _lib.BN_free(bignum_serial) + + def gmtime_adj_notAfter(self, amount: int) -> None: + """ + Adjust the time stamp on which the certificate stops being valid. + + :param int amount: The number of seconds by which to adjust the + timestamp. + :return: ``None`` + """ + if not isinstance(amount, int): + raise TypeError("amount must be an integer") + + notAfter = _lib.X509_getm_notAfter(self._x509) + _lib.X509_gmtime_adj(notAfter, amount) + + def gmtime_adj_notBefore(self, amount: int) -> None: + """ + Adjust the timestamp on which the certificate starts being valid. + + :param amount: The number of seconds by which to adjust the timestamp. + :return: ``None`` + """ + if not isinstance(amount, int): + raise TypeError("amount must be an integer") + + notBefore = _lib.X509_getm_notBefore(self._x509) + _lib.X509_gmtime_adj(notBefore, amount) + + def has_expired(self) -> bool: + """ + Check whether the certificate has expired. + + :return: ``True`` if the certificate has expired, ``False`` otherwise. + :rtype: bool + """ + time_bytes = self.get_notAfter() + if time_bytes is None: + raise ValueError("Unable to determine notAfter") + time_string = time_bytes.decode("utf-8") + not_after = datetime.datetime.strptime(time_string, "%Y%m%d%H%M%SZ") + + UTC = datetime.timezone.utc + utcnow = datetime.datetime.now(UTC).replace(tzinfo=None) + return not_after < utcnow + + def _get_boundary_time(self, which: Any) -> bytes | None: + return _get_asn1_time(which(self._x509)) + + def get_notBefore(self) -> bytes | None: + """ + Get the timestamp at which the certificate starts being valid. + + The timestamp is formatted as an ASN.1 TIME:: + + YYYYMMDDhhmmssZ + + :return: A timestamp string, or ``None`` if there is none. + :rtype: bytes or NoneType + """ + return self._get_boundary_time(_lib.X509_getm_notBefore) + + def _set_boundary_time( + self, which: Callable[..., Any], when: bytes + ) -> None: + return _set_asn1_time(which(self._x509), when) + + def set_notBefore(self, when: bytes) -> None: + """ + Set the timestamp at which the certificate starts being valid. + + The timestamp is formatted as an ASN.1 TIME:: + + YYYYMMDDhhmmssZ + + :param bytes when: A timestamp string. + :return: ``None`` + """ + return self._set_boundary_time(_lib.X509_getm_notBefore, when) + + def get_notAfter(self) -> bytes | None: + """ + Get the timestamp at which the certificate stops being valid. + + The timestamp is formatted as an ASN.1 TIME:: + + YYYYMMDDhhmmssZ + + :return: A timestamp string, or ``None`` if there is none. 
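
The gmtime_adj_* helpers above take an offset in seconds from the current UTC time; a sketch of a one-year validity window (get_notAfter is completed just below):

    from OpenSSL.crypto import X509

    cert = X509()
    cert.gmtime_adj_notBefore(0)                  # valid from now
    cert.gmtime_adj_notAfter(365 * 24 * 60 * 60)  # for one year
    print(cert.get_notAfter())  # ASN.1 TIME bytes, e.g. b"20270101000000Z"
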
+ :rtype: bytes or NoneType + """ + return self._get_boundary_time(_lib.X509_getm_notAfter) + + def set_notAfter(self, when: bytes) -> None: + """ + Set the timestamp at which the certificate stops being valid. + + The timestamp is formatted as an ASN.1 TIME:: + + YYYYMMDDhhmmssZ + + :param bytes when: A timestamp string. + :return: ``None`` + """ + return self._set_boundary_time(_lib.X509_getm_notAfter, when) + + def _get_name(self, which: Any) -> X509Name: + name = X509Name.__new__(X509Name) + name._name = which(self._x509) + _openssl_assert(name._name != _ffi.NULL) + + # The name is owned by the X509 structure. As long as the X509Name + # Python object is alive, keep the X509 Python object alive. + name._owner = self + + return name + + def _set_name(self, which: Any, name: X509Name) -> None: + if not isinstance(name, X509Name): + raise TypeError("name must be an X509Name") + set_result = which(self._x509, name._name) + _openssl_assert(set_result == 1) + + def get_issuer(self) -> X509Name: + """ + Return the issuer of this certificate. + + This creates a new :class:`X509Name` that wraps the underlying issuer + name field on the certificate. Modifying it will modify the underlying + certificate, and will have the effect of modifying any other + :class:`X509Name` that refers to this issuer. + + :return: The issuer of this certificate. + :rtype: :class:`X509Name` + """ + name = self._get_name(_lib.X509_get_issuer_name) + self._issuer_invalidator.add(name) + return name + + def set_issuer(self, issuer: X509Name) -> None: + """ + Set the issuer of this certificate. + + :param issuer: The issuer. + :type issuer: :py:class:`X509Name` + + :return: ``None`` + """ + self._set_name(_lib.X509_set_issuer_name, issuer) + self._issuer_invalidator.clear() + + def get_subject(self) -> X509Name: + """ + Return the subject of this certificate. + + This creates a new :class:`X509Name` that wraps the underlying subject + name field on the certificate. Modifying it will modify the underlying + certificate, and will have the effect of modifying any other + :class:`X509Name` that refers to this subject. + + :return: The subject of this certificate. + :rtype: :class:`X509Name` + """ + name = self._get_name(_lib.X509_get_subject_name) + self._subject_invalidator.add(name) + return name + + def set_subject(self, subject: X509Name) -> None: + """ + Set the subject of this certificate. + + :param subject: The subject. + :type subject: :py:class:`X509Name` + + :return: ``None`` + """ + self._set_name(_lib.X509_set_subject_name, subject) + self._subject_invalidator.clear() + + def get_extension_count(self) -> int: + """ + Get the number of extensions on this certificate. + + :return: The number of extensions. + :rtype: :py:class:`int` + + .. versionadded:: 0.12 + """ + return _lib.X509_get_ext_count(self._x509) + + def add_extensions(self, extensions: Iterable[X509Extension]) -> None: + """ + Add extensions to the certificate. + + :param extensions: The extensions to add. + :type extensions: An iterable of :py:class:`X509Extension` objects. + :return: ``None`` + """ + warnings.warn( + ( + "This API is deprecated and will be removed in a future " + "version of pyOpenSSL. You should use pyca/cryptography's " + "X.509 APIs instead." 
+ ), + DeprecationWarning, + stacklevel=2, + ) + + for ext in extensions: + if not isinstance(ext, X509Extension): + raise ValueError("One of the elements is not an X509Extension") + + add_result = _lib.X509_add_ext(self._x509, ext._extension, -1) + _openssl_assert(add_result == 1) + + def get_extension(self, index: int) -> X509Extension: + """ + Get a specific extension of the certificate by index. + + Extensions on a certificate are kept in order. The index + parameter selects which extension will be returned. + + :param int index: The index of the extension to retrieve. + :return: The extension at the specified index. + :rtype: :py:class:`X509Extension` + :raises IndexError: If the extension index was out of bounds. + + .. versionadded:: 0.12 + """ + warnings.warn( + ( + "This API is deprecated and will be removed in a future " + "version of pyOpenSSL. You should use pyca/cryptography's " + "X.509 APIs instead." + ), + DeprecationWarning, + stacklevel=2, + ) + + ext = X509Extension.__new__(X509Extension) + ext._extension = _lib.X509_get_ext(self._x509, index) + if ext._extension == _ffi.NULL: + raise IndexError("extension index out of bounds") + + extension = _lib.X509_EXTENSION_dup(ext._extension) + ext._extension = _ffi.gc(extension, _lib.X509_EXTENSION_free) + return ext + + +class X509StoreFlags: + """ + Flags for X509 verification, used to change the behavior of + :class:`X509Store`. + + See `OpenSSL Verification Flags`_ for details. + + .. _OpenSSL Verification Flags: + https://www.openssl.org/docs/manmaster/man3/X509_VERIFY_PARAM_set_flags.html + """ + + CRL_CHECK: int = _lib.X509_V_FLAG_CRL_CHECK + CRL_CHECK_ALL: int = _lib.X509_V_FLAG_CRL_CHECK_ALL + IGNORE_CRITICAL: int = _lib.X509_V_FLAG_IGNORE_CRITICAL + X509_STRICT: int = _lib.X509_V_FLAG_X509_STRICT + ALLOW_PROXY_CERTS: int = _lib.X509_V_FLAG_ALLOW_PROXY_CERTS + POLICY_CHECK: int = _lib.X509_V_FLAG_POLICY_CHECK + EXPLICIT_POLICY: int = _lib.X509_V_FLAG_EXPLICIT_POLICY + INHIBIT_MAP: int = _lib.X509_V_FLAG_INHIBIT_MAP + CHECK_SS_SIGNATURE: int = _lib.X509_V_FLAG_CHECK_SS_SIGNATURE + PARTIAL_CHAIN: int = _lib.X509_V_FLAG_PARTIAL_CHAIN + + +class X509Store: + """ + An X.509 store. + + An X.509 store is used to describe a context in which to verify a + certificate. A description of a context may include a set of certificates + to trust, a set of certificate revocation lists, verification flags and + more. + + An X.509 store, being only a description, cannot be used by itself to + verify a certificate. To carry out the actual verification process, see + :class:`X509StoreContext`. + """ + + def __init__(self) -> None: + store = _lib.X509_STORE_new() + self._store = _ffi.gc(store, _lib.X509_STORE_free) + + def add_cert(self, cert: X509) -> None: + """ + Adds a trusted certificate to this store. + + Adding a certificate with this method adds this certificate as a + *trusted* certificate. + + :param X509 cert: The certificate to add to this store. + + :raises TypeError: If the certificate is not an :class:`X509`. + + :raises OpenSSL.crypto.Error: If OpenSSL was unhappy with your + certificate. + + :return: ``None`` if the certificate was added successfully. + """ + if not isinstance(cert, X509): + raise TypeError() + + res = _lib.X509_STORE_add_cert(self._store, cert._x509) + _openssl_assert(res == 1) + + def add_crl(self, crl: x509.CertificateRevocationList) -> None: + """ + Add a certificate revocation list to this store. 
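
Since add_crl only accepts cryptography CRL objects (the body below enforces this), a sketch where crl_pem is a hypothetical PEM byte string; set_flags follows further down:

    from cryptography.x509 import load_pem_x509_crl
    from OpenSSL.crypto import X509Store, X509StoreFlags

    store = X509Store()
    store.add_crl(load_pem_x509_crl(crl_pem))   # crl_pem is hypothetical
    store.set_flags(X509StoreFlags.CRL_CHECK)   # CRL checking needs a CRL in the store
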
+ + The certificate revocation lists added to a store will only be used if + the associated flags are configured to check certificate revocation + lists. + + .. versionadded:: 16.1.0 + + :param crl: The certificate revocation list to add to this store. + :type crl: ``cryptography.x509.CertificateRevocationList`` + :return: ``None`` if the certificate revocation list was added + successfully. + """ + if isinstance(crl, x509.CertificateRevocationList): + from cryptography.hazmat.primitives.serialization import Encoding + + bio = _new_mem_buf(crl.public_bytes(Encoding.DER)) + openssl_crl = _lib.d2i_X509_CRL_bio(bio, _ffi.NULL) + _openssl_assert(openssl_crl != _ffi.NULL) + crl = _ffi.gc(openssl_crl, _lib.X509_CRL_free) + else: + raise TypeError( + "CRL must be of type " + "cryptography.x509.CertificateRevocationList" + ) + + _openssl_assert(_lib.X509_STORE_add_crl(self._store, crl) != 0) + + def set_flags(self, flags: int) -> None: + """ + Set verification flags to this store. + + Verification flags can be combined by oring them together. + + .. note:: + + Setting a verification flag sometimes requires clients to add + additional information to the store, otherwise a suitable error will + be raised. + + For example, in setting flags to enable CRL checking a + suitable CRL must be added to the store otherwise an error will be + raised. + + .. versionadded:: 16.1.0 + + :param int flags: The verification flags to set on this store. + See :class:`X509StoreFlags` for available constants. + :return: ``None`` if the verification flags were successfully set. + """ + _openssl_assert(_lib.X509_STORE_set_flags(self._store, flags) != 0) + + def set_time(self, vfy_time: datetime.datetime) -> None: + """ + Set the time against which the certificates are verified. + + Normally the current time is used. + + .. note:: + + For example, you can determine if a certificate was valid at a given + time. + + .. versionadded:: 17.0.0 + + :param datetime vfy_time: The verification time to set on this store. + :return: ``None`` if the verification time was successfully set. + """ + param = _lib.X509_VERIFY_PARAM_new() + param = _ffi.gc(param, _lib.X509_VERIFY_PARAM_free) + + _lib.X509_VERIFY_PARAM_set_time( + param, calendar.timegm(vfy_time.timetuple()) + ) + _openssl_assert(_lib.X509_STORE_set1_param(self._store, param) != 0) + + def load_locations( + self, + cafile: StrOrBytesPath | None, + capath: StrOrBytesPath | None = None, + ) -> None: + """ + Let X509Store know where we can find trusted certificates for the + certificate chain. Note that the certificates have to be in PEM + format. + + If *capath* is passed, it must be a directory prepared using the + ``c_rehash`` tool included with OpenSSL. Either, but not both, of + *cafile* or *capath* may be ``None``. + + .. note:: + + Both *cafile* and *capath* may be set simultaneously. + + Call this method multiple times to add more than one location. + For example, CA certificates, and certificate revocation list bundles + may be passed in *cafile* in subsequent calls to this method. + + .. versionadded:: 20.0 + + :param cafile: In which file we can find the certificates (``bytes`` or + ``unicode``). + :param capath: In which directory we can find the certificates + (``bytes`` or ``unicode``). + + :return: ``None`` if the locations were set successfully. + + :raises OpenSSL.crypto.Error: If both *cafile* and *capath* is ``None`` + or the locations could not be set for any reason. 
+
+ """
+ if cafile is None:
+ cafile = _ffi.NULL
+ else:
+ cafile = _path_bytes(cafile)
+
+ if capath is None:
+ capath = _ffi.NULL
+ else:
+ capath = _path_bytes(capath)
+
+ load_result = _lib.X509_STORE_load_locations(
+ self._store, cafile, capath
+ )
+ if not load_result:
+ _raise_current_error()
+
+
+class X509StoreContextError(Exception):
+ """
+ An exception raised when an error occurred while verifying a certificate
+ using `OpenSSL.X509StoreContext.verify_certificate`.
+
+ :ivar certificate: The certificate which caused verification failure.
+ :type certificate: :class:`X509`
+ """
+
+ def __init__(
+ self, message: str, errors: list[Any], certificate: X509
+ ) -> None:
+ super().__init__(message)
+ self.errors = errors
+ self.certificate = certificate
+
+
+class X509StoreContext:
+ """
+ An X.509 store context.
+
+ An X.509 store context is used to carry out the actual verification process
+ of a certificate in a described context. For describing such a context, see
+ :class:`X509Store`.
+
+ :param X509Store store: The certificates which will be trusted for the
+ purposes of any verifications.
+ :param X509 certificate: The certificate to be verified.
+ :param chain: List of untrusted certificates that may be used for building
+ the certificate chain. May be ``None``.
+ :type chain: :class:`list` of :class:`X509`
+ """
+
+ def __init__(
+ self,
+ store: X509Store,
+ certificate: X509,
+ chain: Sequence[X509] | None = None,
+ ) -> None:
+ self._store = store
+ self._cert = certificate
+ self._chain = self._build_certificate_stack(chain)
+
+ @staticmethod
+ def _build_certificate_stack(
+ certificates: Sequence[X509] | None,
+ ) -> None:
+ def cleanup(s: Any) -> None:
+ # Equivalent to sk_X509_pop_free, but we don't
+ # currently have a CFFI binding for that available
+ for i in range(_lib.sk_X509_num(s)):
+ x = _lib.sk_X509_value(s, i)
+ _lib.X509_free(x)
+ _lib.sk_X509_free(s)
+
+ if certificates is None or len(certificates) == 0:
+ return _ffi.NULL
+
+ stack = _lib.sk_X509_new_null()
+ _openssl_assert(stack != _ffi.NULL)
+ stack = _ffi.gc(stack, cleanup)
+
+ for cert in certificates:
+ if not isinstance(cert, X509):
+ raise TypeError("One of the elements is not an X509 instance")
+
+ _openssl_assert(_lib.X509_up_ref(cert._x509) > 0)
+ if _lib.sk_X509_push(stack, cert._x509) <= 0:
+ _lib.X509_free(cert._x509)
+ _raise_current_error()
+
+ return stack
+
+ @staticmethod
+ def _exception_from_context(store_ctx: Any) -> X509StoreContextError:
+ """
+ Convert an OpenSSL native context error failure into a Python
+ exception.
+
+ When a call to native OpenSSL X509_verify_cert fails, additional
+ information about the failure can be obtained from the store context.
+ """
+ message = _ffi.string(
+ _lib.X509_verify_cert_error_string(
+ _lib.X509_STORE_CTX_get_error(store_ctx)
+ )
+ ).decode("utf-8")
+ errors = [
+ _lib.X509_STORE_CTX_get_error(store_ctx),
+ _lib.X509_STORE_CTX_get_error_depth(store_ctx),
+ message,
+ ]
+ # A context error should always be associated with a certificate, so we
+ # expect this call to never return :class:`None`.
+ _x509 = _lib.X509_STORE_CTX_get_current_cert(store_ctx)
+ _cert = _lib.X509_dup(_x509)
+ pycert = X509._from_raw_x509_ptr(_cert)
+ return X509StoreContextError(message, errors, pycert)
+
+ def _verify_certificate(self) -> Any:
+ """
+ Verifies the certificate and runs an X509_STORE_CTX containing the
+ results.
+
+ :raises X509StoreContextError: If an error occurred when validating a
+ certificate in the context.
Sets ``certificate`` attribute to + indicate which certificate caused the error. + """ + store_ctx = _lib.X509_STORE_CTX_new() + _openssl_assert(store_ctx != _ffi.NULL) + store_ctx = _ffi.gc(store_ctx, _lib.X509_STORE_CTX_free) + + ret = _lib.X509_STORE_CTX_init( + store_ctx, self._store._store, self._cert._x509, self._chain + ) + _openssl_assert(ret == 1) + + ret = _lib.X509_verify_cert(store_ctx) + if ret <= 0: + raise self._exception_from_context(store_ctx) + + return store_ctx + + def set_store(self, store: X509Store) -> None: + """ + Set the context's X.509 store. + + .. versionadded:: 0.15 + + :param X509Store store: The store description which will be used for + the purposes of any *future* verifications. + """ + self._store = store + + def verify_certificate(self) -> None: + """ + Verify a certificate in a context. + + .. versionadded:: 0.15 + + :raises X509StoreContextError: If an error occurred when validating a + certificate in the context. Sets ``certificate`` attribute to + indicate which certificate caused the error. + """ + self._verify_certificate() + + def get_verified_chain(self) -> list[X509]: + """ + Verify a certificate in a context and return the complete validated + chain. + + :raises X509StoreContextError: If an error occurred when validating a + certificate in the context. Sets ``certificate`` attribute to + indicate which certificate caused the error. + + .. versionadded:: 20.0 + """ + store_ctx = self._verify_certificate() + + # Note: X509_STORE_CTX_get1_chain returns a deep copy of the chain. + cert_stack = _lib.X509_STORE_CTX_get1_chain(store_ctx) + _openssl_assert(cert_stack != _ffi.NULL) + + result = [] + for i in range(_lib.sk_X509_num(cert_stack)): + cert = _lib.sk_X509_value(cert_stack, i) + _openssl_assert(cert != _ffi.NULL) + pycert = X509._from_raw_x509_ptr(cert) + result.append(pycert) + + # Free the stack but not the members which are freed by the X509 class. + _lib.sk_X509_free(cert_stack) + return result + + +def load_certificate(type: int, buffer: bytes) -> X509: + """ + Load a certificate (X509) from the string *buffer* encoded with the + type *type*. + + :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1) + + :param bytes buffer: The buffer the certificate is stored in + + :return: The X509 object + """ + if isinstance(buffer, str): + buffer = buffer.encode("ascii") + + bio = _new_mem_buf(buffer) + + if type == FILETYPE_PEM: + x509 = _lib.PEM_read_bio_X509(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) + elif type == FILETYPE_ASN1: + x509 = _lib.d2i_X509_bio(bio, _ffi.NULL) + else: + raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1") + + if x509 == _ffi.NULL: + _raise_current_error() + + return X509._from_raw_x509_ptr(x509) + + +def dump_certificate(type: int, cert: X509) -> bytes: + """ + Dump the certificate *cert* into a buffer string encoded with the type + *type*. 
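
A sketch tying X509Store and X509StoreContext together; root_pem and leaf_pem are hypothetical PEM byte strings:

    from OpenSSL.crypto import (
        FILETYPE_PEM,
        X509Store,
        X509StoreContext,
        X509StoreContextError,
        load_certificate,
    )

    store = X509Store()
    store.add_cert(load_certificate(FILETYPE_PEM, root_pem))

    ctx = X509StoreContext(store, load_certificate(FILETYPE_PEM, leaf_pem))
    try:
        chain = ctx.get_verified_chain()  # leaf first, root last on success
    except X509StoreContextError as e:
        print(e.certificate.get_subject(), e.errors)
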
+ + :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1, or + FILETYPE_TEXT) + :param cert: The certificate to dump + :return: The buffer with the dumped certificate in + """ + bio = _new_mem_buf() + + if type == FILETYPE_PEM: + result_code = _lib.PEM_write_bio_X509(bio, cert._x509) + elif type == FILETYPE_ASN1: + result_code = _lib.i2d_X509_bio(bio, cert._x509) + elif type == FILETYPE_TEXT: + result_code = _lib.X509_print_ex(bio, cert._x509, 0, 0) + else: + raise ValueError( + "type argument must be FILETYPE_PEM, FILETYPE_ASN1, or " + "FILETYPE_TEXT" + ) + + _openssl_assert(result_code == 1) + return _bio_to_string(bio) + + +def dump_publickey(type: int, pkey: PKey) -> bytes: + """ + Dump a public key to a buffer. + + :param type: The file type (one of :data:`FILETYPE_PEM` or + :data:`FILETYPE_ASN1`). + :param PKey pkey: The public key to dump + :return: The buffer with the dumped key in it. + :rtype: bytes + """ + bio = _new_mem_buf() + if type == FILETYPE_PEM: + write_bio = _lib.PEM_write_bio_PUBKEY + elif type == FILETYPE_ASN1: + write_bio = _lib.i2d_PUBKEY_bio + else: + raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1") + + result_code = write_bio(bio, pkey._pkey) + if result_code != 1: # pragma: no cover + _raise_current_error() + + return _bio_to_string(bio) + + +def dump_privatekey( + type: int, + pkey: PKey, + cipher: str | None = None, + passphrase: PassphraseCallableT | None = None, +) -> bytes: + """ + Dump the private key *pkey* into a buffer string encoded with the type + *type*. Optionally (if *type* is :const:`FILETYPE_PEM`) encrypting it + using *cipher* and *passphrase*. + + :param type: The file type (one of :const:`FILETYPE_PEM`, + :const:`FILETYPE_ASN1`, or :const:`FILETYPE_TEXT`) + :param PKey pkey: The PKey to dump + :param cipher: (optional) if encrypted PEM format, the cipher to use + :param passphrase: (optional) if encrypted PEM format, this can be either + the passphrase to use, or a callback for providing the passphrase. 
+ + :return: The buffer with the dumped key in + :rtype: bytes + """ + bio = _new_mem_buf() + + if not isinstance(pkey, PKey): + raise TypeError("pkey must be a PKey") + + if cipher is not None: + if passphrase is None: + raise TypeError( + "if a value is given for cipher " + "one must also be given for passphrase" + ) + cipher_obj = _lib.EVP_get_cipherbyname(_byte_string(cipher)) + if cipher_obj == _ffi.NULL: + raise ValueError("Invalid cipher name") + else: + cipher_obj = _ffi.NULL + + helper = _PassphraseHelper(type, passphrase) + if type == FILETYPE_PEM: + result_code = _lib.PEM_write_bio_PrivateKey( + bio, + pkey._pkey, + cipher_obj, + _ffi.NULL, + 0, + helper.callback, + helper.callback_args, + ) + helper.raise_if_problem() + elif type == FILETYPE_ASN1: + result_code = _lib.i2d_PrivateKey_bio(bio, pkey._pkey) + elif type == FILETYPE_TEXT: + if _lib.EVP_PKEY_id(pkey._pkey) != _lib.EVP_PKEY_RSA: + raise TypeError("Only RSA keys are supported for FILETYPE_TEXT") + + rsa = _ffi.gc(_lib.EVP_PKEY_get1_RSA(pkey._pkey), _lib.RSA_free) + result_code = _lib.RSA_print(bio, rsa, 0) + else: + raise ValueError( + "type argument must be FILETYPE_PEM, FILETYPE_ASN1, or " + "FILETYPE_TEXT" + ) + + _openssl_assert(result_code != 0) + + return _bio_to_string(bio) + + +class _PassphraseHelper: + def __init__( + self, + type: int, + passphrase: PassphraseCallableT | None, + more_args: bool = False, + truncate: bool = False, + ) -> None: + if type != FILETYPE_PEM and passphrase is not None: + raise ValueError( + "only FILETYPE_PEM key format supports encryption" + ) + self._passphrase = passphrase + self._more_args = more_args + self._truncate = truncate + self._problems: list[Exception] = [] + + @property + def callback(self) -> Any: + if self._passphrase is None: + return _ffi.NULL + elif isinstance(self._passphrase, bytes) or callable(self._passphrase): + return _ffi.callback("pem_password_cb", self._read_passphrase) + else: + raise TypeError( + "Last argument must be a byte string or a callable." + ) + + @property + def callback_args(self) -> Any: + if self._passphrase is None: + return _ffi.NULL + elif isinstance(self._passphrase, bytes) or callable(self._passphrase): + return _ffi.NULL + else: + raise TypeError( + "Last argument must be a byte string or a callable." + ) + + def raise_if_problem(self, exceptionType: type[Exception] = Error) -> None: + if self._problems: + # Flush the OpenSSL error queue + try: + _exception_from_error_queue(exceptionType) + except exceptionType: + pass + + raise self._problems.pop(0) + + def _read_passphrase( + self, buf: Any, size: int, rwflag: Any, userdata: Any + ) -> int: + try: + if callable(self._passphrase): + if self._more_args: + result = self._passphrase(size, rwflag, userdata) + else: + result = self._passphrase(rwflag) + else: + assert self._passphrase is not None + result = self._passphrase + if not isinstance(result, bytes): + raise ValueError("Bytes expected") + if len(result) > size: + if self._truncate: + result = result[:size] + else: + raise ValueError( + "passphrase returned by callback is too long" + ) + for i in range(len(result)): + buf[i] = result[i : i + 1] + return len(result) + except Exception as e: + self._problems.append(e) + return 0 + + +def load_publickey(type: int, buffer: str | bytes) -> PKey: + """ + Load a public key from a buffer. + + :param type: The file type (one of :data:`FILETYPE_PEM`, + :data:`FILETYPE_ASN1`). + :param buffer: The buffer the key is stored in. 
+ :type buffer: A Python string object, either unicode or bytestring. + :return: The PKey object. + :rtype: :class:`PKey` + """ + if isinstance(buffer, str): + buffer = buffer.encode("ascii") + + bio = _new_mem_buf(buffer) + + if type == FILETYPE_PEM: + evp_pkey = _lib.PEM_read_bio_PUBKEY( + bio, _ffi.NULL, _ffi.NULL, _ffi.NULL + ) + elif type == FILETYPE_ASN1: + evp_pkey = _lib.d2i_PUBKEY_bio(bio, _ffi.NULL) + else: + raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1") + + if evp_pkey == _ffi.NULL: + _raise_current_error() + + pkey = PKey.__new__(PKey) + pkey._pkey = _ffi.gc(evp_pkey, _lib.EVP_PKEY_free) + pkey._only_public = True + return pkey + + +def load_privatekey( + type: int, + buffer: str | bytes, + passphrase: PassphraseCallableT | None = None, +) -> PKey: + """ + Load a private key (PKey) from the string *buffer* encoded with the type + *type*. + + :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1) + :param buffer: The buffer the key is stored in + :param passphrase: (optional) if encrypted PEM format, this can be + either the passphrase to use, or a callback for + providing the passphrase. + + :return: The PKey object + """ + if isinstance(buffer, str): + buffer = buffer.encode("ascii") + + bio = _new_mem_buf(buffer) + + helper = _PassphraseHelper(type, passphrase) + if type == FILETYPE_PEM: + evp_pkey = _lib.PEM_read_bio_PrivateKey( + bio, _ffi.NULL, helper.callback, helper.callback_args + ) + helper.raise_if_problem() + elif type == FILETYPE_ASN1: + evp_pkey = _lib.d2i_PrivateKey_bio(bio, _ffi.NULL) + else: + raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1") + + if evp_pkey == _ffi.NULL: + _raise_current_error() + + pkey = PKey.__new__(PKey) + pkey._pkey = _ffi.gc(evp_pkey, _lib.EVP_PKEY_free) + return pkey + + +def dump_certificate_request(type: int, req: X509Req) -> bytes: + """ + Dump the certificate request *req* into a buffer string encoded with the + type *type*. + + :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1) + :param req: The certificate request to dump + :return: The buffer with the dumped certificate request in + + + .. deprecated:: 24.2.0 + Use `cryptography.x509.CertificateSigningRequest` instead. + """ + bio = _new_mem_buf() + + if type == FILETYPE_PEM: + result_code = _lib.PEM_write_bio_X509_REQ(bio, req._req) + elif type == FILETYPE_ASN1: + result_code = _lib.i2d_X509_REQ_bio(bio, req._req) + elif type == FILETYPE_TEXT: + result_code = _lib.X509_REQ_print_ex(bio, req._req, 0, 0) + else: + raise ValueError( + "type argument must be FILETYPE_PEM, FILETYPE_ASN1, or " + "FILETYPE_TEXT" + ) + + _openssl_assert(result_code != 0) + + return _bio_to_string(bio) + + +_dump_certificate_request_internal = dump_certificate_request + +utils.deprecated( + dump_certificate_request, + __name__, + ( + "CSR support in pyOpenSSL is deprecated. You should use the APIs " + "in cryptography." + ), + DeprecationWarning, + name="dump_certificate_request", +) + + +def load_certificate_request(type: int, buffer: bytes) -> X509Req: + """ + Load a certificate request (X509Req) from the string *buffer* encoded with + the type *type*. + + :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1) + :param buffer: The buffer the certificate request is stored in + :return: The X509Req object + + .. deprecated:: 24.2.0 + Use `cryptography.x509.load_der_x509_csr` or + `cryptography.x509.load_pem_x509_csr` instead. 
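
A round-trip sketch for the dump_privatekey / load_privatekey pair above, exercising the _PassphraseHelper plumbing; the cipher name and passphrase are illustrative:

    from OpenSSL.crypto import (
        FILETYPE_PEM,
        TYPE_RSA,
        PKey,
        dump_privatekey,
        load_privatekey,
    )

    key = PKey()
    key.generate_key(TYPE_RSA, 2048)

    pem = dump_privatekey(FILETYPE_PEM, key, "aes-256-cbc", b"hunter2")
    assert load_privatekey(FILETYPE_PEM, pem, b"hunter2").bits() == 2048
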
+ """ + if isinstance(buffer, str): + buffer = buffer.encode("ascii") + + bio = _new_mem_buf(buffer) + + if type == FILETYPE_PEM: + req = _lib.PEM_read_bio_X509_REQ(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) + elif type == FILETYPE_ASN1: + req = _lib.d2i_X509_REQ_bio(bio, _ffi.NULL) + else: + raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1") + + _openssl_assert(req != _ffi.NULL) + + x509req = X509Req.__new__(X509Req) + x509req._req = _ffi.gc(req, _lib.X509_REQ_free) + return x509req + + +_load_certificate_request_internal = load_certificate_request + +utils.deprecated( + load_certificate_request, + __name__, + ( + "CSR support in pyOpenSSL is deprecated. You should use the APIs " + "in cryptography." + ), + DeprecationWarning, + name="load_certificate_request", +) diff --git a/venv/lib/python3.12/site-packages/OpenSSL/debug.py b/venv/lib/python3.12/site-packages/OpenSSL/debug.py new file mode 100644 index 00000000..e0ed3f81 --- /dev/null +++ b/venv/lib/python3.12/site-packages/OpenSSL/debug.py @@ -0,0 +1,40 @@ +import ssl +import sys + +import cffi +import cryptography + +import OpenSSL.SSL + +from . import version + +_env_info = """\ +pyOpenSSL: {pyopenssl} +cryptography: {cryptography} +cffi: {cffi} +cryptography's compiled against OpenSSL: {crypto_openssl_compile} +cryptography's linked OpenSSL: {crypto_openssl_link} +Python's OpenSSL: {python_openssl} +Python executable: {python} +Python version: {python_version} +Platform: {platform} +sys.path: {sys_path}""".format( + pyopenssl=version.__version__, + crypto_openssl_compile=OpenSSL._util.ffi.string( + OpenSSL._util.lib.OPENSSL_VERSION_TEXT, + ).decode("ascii"), + crypto_openssl_link=OpenSSL.SSL.SSLeay_version( + OpenSSL.SSL.SSLEAY_VERSION + ).decode("ascii"), + python_openssl=getattr(ssl, "OPENSSL_VERSION", "n/a"), + cryptography=cryptography.__version__, + cffi=cffi.__version__, + python=sys.executable, + python_version=sys.version, + platform=sys.platform, + sys_path=sys.path, +) + + +if __name__ == "__main__": + print(_env_info) diff --git a/venv/lib/python3.12/site-packages/OpenSSL/py.typed b/venv/lib/python3.12/site-packages/OpenSSL/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/venv/lib/python3.12/site-packages/OpenSSL/rand.py b/venv/lib/python3.12/site-packages/OpenSSL/rand.py new file mode 100644 index 00000000..e57425f3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/OpenSSL/rand.py @@ -0,0 +1,50 @@ +""" +PRNG management routines, thin wrappers. +""" + +from __future__ import annotations + +import warnings + +from OpenSSL._util import lib as _lib + +warnings.warn( + "OpenSSL.rand is deprecated - you should use os.urandom instead", + DeprecationWarning, + stacklevel=3, +) + + +def add(buffer: bytes, entropy: int) -> None: + """ + Mix bytes from *string* into the PRNG state. + + The *entropy* argument is (the lower bound of) an estimate of how much + randomness is contained in *string*, measured in bytes. + + For more information, see e.g. :rfc:`1750`. + + This function is only relevant if you are forking Python processes and + need to reseed the CSPRNG after fork. + + :param buffer: Buffer with random data. + :param entropy: The entropy (in bytes) measurement of the buffer. 
+
+    :return: :obj:`None`
+    """
+    if not isinstance(buffer, bytes):
+        raise TypeError("buffer must be a byte string")
+
+    if not isinstance(entropy, int):
+        raise TypeError("entropy must be an integer")
+
+    _lib.RAND_add(buffer, len(buffer), entropy)
+
+
+def status() -> int:
+    """
+    Check whether the PRNG has been seeded with enough data.
+
+    :return: 1 if the PRNG is seeded enough, 0 otherwise.
+    """
+    return _lib.RAND_status()
diff --git a/venv/lib/python3.12/site-packages/OpenSSL/version.py b/venv/lib/python3.12/site-packages/OpenSSL/version.py
new file mode 100644
index 00000000..c49055e1
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/OpenSSL/version.py
@@ -0,0 +1,28 @@
+# Copyright (C) AB Strakt
+# Copyright (C) Jean-Paul Calderone
+# See LICENSE for details.
+
+"""
+pyOpenSSL - A simple wrapper around the OpenSSL library
+"""
+
+__all__ = [
+    "__author__",
+    "__copyright__",
+    "__email__",
+    "__license__",
+    "__summary__",
+    "__title__",
+    "__uri__",
+    "__version__",
+]
+
+__version__ = "25.3.0"
+
+__title__ = "pyOpenSSL"
+__uri__ = "https://pyopenssl.org/"
+__summary__ = "Python wrapper module around the OpenSSL library"
+__author__ = "The pyOpenSSL developers"
+__email__ = "cryptography-dev@python.org"
+__license__ = "Apache License, Version 2.0"
+__copyright__ = f"Copyright 2001-2025 {__author__}"
diff --git a/venv/lib/python3.12/site-packages/__pycache__/typing_extensions.cpython-312.pyc b/venv/lib/python3.12/site-packages/__pycache__/typing_extensions.cpython-312.pyc
new file mode 100644
index 00000000..546b04d5
Binary files /dev/null and b/venv/lib/python3.12/site-packages/__pycache__/typing_extensions.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/_cffi_backend.cpython-312-x86_64-linux-gnu.so b/venv/lib/python3.12/site-packages/_cffi_backend.cpython-312-x86_64-linux-gnu.so
new file mode 100755
index 00000000..156ee431
Binary files /dev/null and b/venv/lib/python3.12/site-packages/_cffi_backend.cpython-312-x86_64-linux-gnu.so differ
diff --git a/venv/lib/python3.12/site-packages/_yaml/__init__.py b/venv/lib/python3.12/site-packages/_yaml/__init__.py
new file mode 100644
index 00000000..7baa8c4b
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/_yaml/__init__.py
@@ -0,0 +1,33 @@
+# This is a stub package designed to roughly emulate the _yaml
+# extension module, which previously existed as a standalone module
+# and has been moved into the `yaml` package namespace.
+# It does not perfectly mimic its old counterpart, but should get
+# close enough for anyone who's relying on it even when they shouldn't.
+import yaml
+
+# in some circumstances, the yaml module we imported may be from a different version, so we need
+# to tread carefully when poking at it here (it may not have the attributes we expect)
+if not getattr(yaml, '__with_libyaml__', False):
+    from sys import version_info
+
+    exc = ModuleNotFoundError if version_info >= (3, 6) else ImportError
+    raise exc("No module named '_yaml'")
+else:
+    from yaml._yaml import *
+    import warnings
+    warnings.warn(
+        'The _yaml extension module is now located at yaml._yaml'
+        ' and its location is subject to change. To use the'
+        ' LibYAML-based parser and emitter, import from `yaml`:'
+        ' `from yaml import CLoader as Loader, CDumper as Dumper`.',
+        DeprecationWarning
+    )
+    del warnings
+    # Don't `del yaml` here because yaml is actually an existing
+    # namespace member of _yaml.
+
+__name__ = '_yaml'
+# If the module is top-level (i.e.
not a part of any specific package) +# then the attribute should be set to ''. +# https://docs.python.org/3.8/library/types.html +__package__ = '' diff --git a/venv/lib/python3.12/site-packages/_yaml/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/_yaml/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..33ac8e02 Binary files /dev/null and b/venv/lib/python3.12/site-packages/_yaml/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/blinker/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/blinker/__pycache__/__init__.cpython-312.pyc index 48335193..cde6662f 100644 Binary files a/venv/lib/python3.12/site-packages/blinker/__pycache__/__init__.cpython-312.pyc and b/venv/lib/python3.12/site-packages/blinker/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/blinker/__pycache__/_utilities.cpython-312.pyc b/venv/lib/python3.12/site-packages/blinker/__pycache__/_utilities.cpython-312.pyc index 238f27c5..2585c36a 100644 Binary files a/venv/lib/python3.12/site-packages/blinker/__pycache__/_utilities.cpython-312.pyc and b/venv/lib/python3.12/site-packages/blinker/__pycache__/_utilities.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/blinker/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/blinker/__pycache__/base.cpython-312.pyc index 42687b3a..eb9139e9 100644 Binary files a/venv/lib/python3.12/site-packages/blinker/__pycache__/base.cpython-312.pyc and b/venv/lib/python3.12/site-packages/blinker/__pycache__/base.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click-8.3.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/INSTALLER similarity index 100% rename from venv/lib/python3.12/site-packages/click-8.3.0.dist-info/INSTALLER rename to venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/INSTALLER diff --git a/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/METADATA new file mode 100644 index 00000000..67508e56 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/METADATA @@ -0,0 +1,68 @@ +Metadata-Version: 2.4 +Name: cffi +Version: 2.0.0 +Summary: Foreign Function Interface for Python calling C code. 
+Author: Armin Rigo, Maciej Fijalkowski
+Maintainer: Matt Davis, Matt Clay, Matti Picus
+License-Expression: MIT
+Project-URL: Documentation, https://cffi.readthedocs.io/
+Project-URL: Changelog, https://cffi.readthedocs.io/en/latest/whatsnew.html
+Project-URL: Downloads, https://github.com/python-cffi/cffi/releases
+Project-URL: Contact, https://groups.google.com/forum/#!forum/python-cffi
+Project-URL: Source Code, https://github.com/python-cffi/cffi
+Project-URL: Issue Tracker, https://github.com/python-cffi/cffi/issues
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Programming Language :: Python :: Free Threading :: 2 - Beta
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Requires-Python: >=3.9
+Description-Content-Type: text/markdown
+License-File: LICENSE
+License-File: AUTHORS
+Requires-Dist: pycparser; implementation_name != "PyPy"
+Dynamic: license-file
+
+[![GitHub Actions Status](https://github.com/python-cffi/cffi/actions/workflows/ci.yaml/badge.svg?branch=main)](https://github.com/python-cffi/cffi/actions/workflows/ci.yaml?query=branch%3Amain++)
+[![PyPI version](https://img.shields.io/pypi/v/cffi.svg)](https://pypi.org/project/cffi)
+[![Read the Docs](https://img.shields.io/badge/docs-latest-blue.svg)][Documentation]
+
+
+CFFI
+====
+
+Foreign Function Interface for Python calling C code.
+
+Please see the [Documentation], or the uncompiled sources in the `doc/` subdirectory.
+
+Download
+--------
+
+[Download page](https://github.com/python-cffi/cffi/releases)
+
+Source Code
+-----------
+
+Source code is publicly available on
+[GitHub](https://github.com/python-cffi/cffi).
+
+Contact
+-------
+
+[Mailing list](https://groups.google.com/forum/#!forum/python-cffi)
+
+Testing/development tips
+------------------------
+
+After `git clone` or `wget && tar`, we will get a directory called `cffi` or `cffi-x.x.x`; we call it the `repo-directory`. To run tests under CPython, run the following in the `repo-directory`:
+
+    pip install pytest
+    pip install -e .
# editable install of CFFI for local development + pytest src/c/ testing/ + +[Documentation]: http://cffi.readthedocs.org/ diff --git a/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/RECORD new file mode 100644 index 00000000..6f822989 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/RECORD @@ -0,0 +1,49 @@ +_cffi_backend.cpython-312-x86_64-linux-gnu.so,sha256=AGLtw5fn9u4Cmwk3BbGlsXG7VZEvQekABMyEGuRZmcE,348808 +cffi-2.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cffi-2.0.0.dist-info/METADATA,sha256=uYzn40F68Im8EtXHNBLZs7FoPM-OxzyYbDWsjJvhujk,2559 +cffi-2.0.0.dist-info/RECORD,, +cffi-2.0.0.dist-info/WHEEL,sha256=aSgG0F4rGPZtV0iTEIfy6dtHq6g67Lze3uLfk0vWn88,151 +cffi-2.0.0.dist-info/entry_points.txt,sha256=y6jTxnyeuLnL-XJcDv8uML3n6wyYiGRg8MTp_QGJ9Ho,75 +cffi-2.0.0.dist-info/licenses/AUTHORS,sha256=KmemC7-zN1nWfWRf8TG45ta8TK_CMtdR_Kw-2k0xTMg,208 +cffi-2.0.0.dist-info/licenses/LICENSE,sha256=W6JN3FcGf5JJrdZEw6_EGl1tw34jQz73Wdld83Cwr2M,1123 +cffi-2.0.0.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19 +cffi/__init__.py,sha256=-ksBQ7MfDzVvbBlV_ftYBWAmEqfA86ljIzMxzaZeAlI,511 +cffi/__pycache__/__init__.cpython-312.pyc,, +cffi/__pycache__/_imp_emulation.cpython-312.pyc,, +cffi/__pycache__/_shimmed_dist_utils.cpython-312.pyc,, +cffi/__pycache__/api.cpython-312.pyc,, +cffi/__pycache__/backend_ctypes.cpython-312.pyc,, +cffi/__pycache__/cffi_opcode.cpython-312.pyc,, +cffi/__pycache__/commontypes.cpython-312.pyc,, +cffi/__pycache__/cparser.cpython-312.pyc,, +cffi/__pycache__/error.cpython-312.pyc,, +cffi/__pycache__/ffiplatform.cpython-312.pyc,, +cffi/__pycache__/lock.cpython-312.pyc,, +cffi/__pycache__/model.cpython-312.pyc,, +cffi/__pycache__/pkgconfig.cpython-312.pyc,, +cffi/__pycache__/recompiler.cpython-312.pyc,, +cffi/__pycache__/setuptools_ext.cpython-312.pyc,, +cffi/__pycache__/vengine_cpy.cpython-312.pyc,, +cffi/__pycache__/vengine_gen.cpython-312.pyc,, +cffi/__pycache__/verifier.cpython-312.pyc,, +cffi/_cffi_errors.h,sha256=zQXt7uR_m8gUW-fI2hJg0KoSkJFwXv8RGUkEDZ177dQ,3908 +cffi/_cffi_include.h,sha256=Exhmgm9qzHWzWivjfTe0D7Xp4rPUkVxdNuwGhMTMzbw,15055 +cffi/_embedding.h,sha256=Ai33FHblE7XSpHOCp8kPcWwN5_9BV14OvN0JVa6ITpw,18786 +cffi/_imp_emulation.py,sha256=RxREG8zAbI2RPGBww90u_5fi8sWdahpdipOoPzkp7C0,2960 +cffi/_shimmed_dist_utils.py,sha256=Bjj2wm8yZbvFvWEx5AEfmqaqZyZFhYfoyLLQHkXZuao,2230 +cffi/api.py,sha256=alBv6hZQkjpmZplBphdaRn2lPO9-CORs_M7ixabvZWI,42169 +cffi/backend_ctypes.py,sha256=h5ZIzLc6BFVXnGyc9xPqZWUS7qGy7yFSDqXe68Sa8z4,42454 +cffi/cffi_opcode.py,sha256=JDV5l0R0_OadBX_uE7xPPTYtMdmpp8I9UYd6av7aiDU,5731 +cffi/commontypes.py,sha256=7N6zPtCFlvxXMWhHV08psUjdYIK2XgsN3yo5dgua_v4,2805 +cffi/cparser.py,sha256=QUTfmlL-aO-MYR8bFGlvAUHc36OQr7XYLe0WLkGFjRo,44790 +cffi/error.py,sha256=v6xTiS4U0kvDcy4h_BDRo5v39ZQuj-IMRYLv5ETddZs,877 +cffi/ffiplatform.py,sha256=avxFjdikYGJoEtmJO7ewVmwG_VEVl6EZ_WaNhZYCqv4,3584 +cffi/lock.py,sha256=l9TTdwMIMpi6jDkJGnQgE9cvTIR7CAntIJr8EGHt3pY,747 +cffi/model.py,sha256=W30UFQZE73jL5Mx5N81YT77us2W2iJjTm0XYfnwz1cg,21797 +cffi/parse_c_type.h,sha256=OdwQfwM9ktq6vlCB43exFQmxDBtj2MBNdK8LYl15tjw,5976 +cffi/pkgconfig.py,sha256=LP1w7vmWvmKwyqLaU1Z243FOWGNQMrgMUZrvgFuOlco,4374 +cffi/recompiler.py,sha256=78J6lMEEOygXNmjN9-fOFFO3j7eW-iFxSrxfvQb54bY,65509 +cffi/setuptools_ext.py,sha256=0rCwBJ1W7FHWtiMKfNXsSST88V8UXrui5oeXFlDNLG8,9411 +cffi/vengine_cpy.py,sha256=oyQKD23kpE0aChUKA8Jg0e723foPiYzLYEdb-J0MiNs,43881 
+cffi/vengine_gen.py,sha256=DUlEIrDiVin1Pnhn1sfoamnS5NLqfJcOdhRoeSNeJRg,26939 +cffi/verifier.py,sha256=oX8jpaohg2Qm3aHcznidAdvrVm5N4sQYG0a3Eo5mIl4,11182 diff --git a/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/WHEEL b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/WHEEL new file mode 100644 index 00000000..e21e9f2f --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: false +Tag: cp312-cp312-manylinux_2_17_x86_64 +Tag: cp312-cp312-manylinux2014_x86_64 + diff --git a/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/entry_points.txt b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/entry_points.txt new file mode 100644 index 00000000..4b0274f2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[distutils.setup_keywords] +cffi_modules = cffi.setuptools_ext:cffi_modules diff --git a/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/licenses/AUTHORS b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/licenses/AUTHORS new file mode 100644 index 00000000..370a25d3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/licenses/AUTHORS @@ -0,0 +1,8 @@ +This package has been mostly done by Armin Rigo with help from +Maciej Fijałkowski. The idea is heavily based (although not directly +copied) from LuaJIT ffi by Mike Pall. + + +Other contributors: + + Google Inc. diff --git a/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/licenses/LICENSE new file mode 100644 index 00000000..0a1dbfb0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/licenses/LICENSE @@ -0,0 +1,23 @@ + +Except when otherwise stated (look for LICENSE files in directories or +information at the beginning of each file) all software and +documentation is licensed as follows: + + MIT No Attribution + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without + restriction, including without limitation the rights to use, + copy, modify, merge, publish, distribute, sublicense, and/or + sell copies of the Software, and to permit persons to whom the + Software is furnished to do so. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. 
+ diff --git a/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/top_level.txt new file mode 100644 index 00000000..f6457795 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_cffi_backend +cffi diff --git a/venv/lib/python3.12/site-packages/cffi/__init__.py b/venv/lib/python3.12/site-packages/cffi/__init__.py new file mode 100644 index 00000000..c99ec3d4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/__init__.py @@ -0,0 +1,14 @@ +__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError', + 'FFIError'] + +from .api import FFI +from .error import CDefError, FFIError, VerificationError, VerificationMissing +from .error import PkgConfigError + +__version__ = "2.0.0" +__version_info__ = (2, 0, 0) + +# The verifier module file names are based on the CRC32 of a string that +# contains the following version number. It may be older than __version__ +# if nothing is clearly incompatible. +__version_verifier_modules__ = "0.8.6" diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..a36694e9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/_imp_emulation.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/_imp_emulation.cpython-312.pyc new file mode 100644 index 00000000..83cee854 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/_imp_emulation.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-312.pyc new file mode 100644 index 00000000..4cd16077 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/api.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/api.cpython-312.pyc new file mode 100644 index 00000000..81ce9f0b Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/api.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/backend_ctypes.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/backend_ctypes.cpython-312.pyc new file mode 100644 index 00000000..a93ef95d Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/backend_ctypes.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/cffi_opcode.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/cffi_opcode.cpython-312.pyc new file mode 100644 index 00000000..490b6916 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/cffi_opcode.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/commontypes.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/commontypes.cpython-312.pyc new file mode 100644 index 00000000..222daff8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/commontypes.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/cparser.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/cffi/__pycache__/cparser.cpython-312.pyc new file mode 100644 index 00000000..ef859de0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/cparser.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/error.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/error.cpython-312.pyc new file mode 100644 index 00000000..b402d8fc Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/error.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/ffiplatform.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/ffiplatform.cpython-312.pyc new file mode 100644 index 00000000..5b83012f Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/ffiplatform.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/lock.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/lock.cpython-312.pyc new file mode 100644 index 00000000..610c07b8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/lock.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/model.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/model.cpython-312.pyc new file mode 100644 index 00000000..9b46baae Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/model.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/pkgconfig.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/pkgconfig.cpython-312.pyc new file mode 100644 index 00000000..f347a250 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/pkgconfig.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/recompiler.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/recompiler.cpython-312.pyc new file mode 100644 index 00000000..a37091fd Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/recompiler.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/setuptools_ext.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/setuptools_ext.cpython-312.pyc new file mode 100644 index 00000000..3f42a3ef Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/setuptools_ext.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/vengine_cpy.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/vengine_cpy.cpython-312.pyc new file mode 100644 index 00000000..9d6f39b7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/vengine_cpy.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/vengine_gen.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/vengine_gen.cpython-312.pyc new file mode 100644 index 00000000..5f72a84c Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/vengine_gen.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/verifier.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/verifier.cpython-312.pyc new file mode 100644 index 00000000..12ef1bd4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/verifier.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/cffi/_cffi_errors.h b/venv/lib/python3.12/site-packages/cffi/_cffi_errors.h new file mode 100644 index 00000000..158e0590 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/_cffi_errors.h @@ -0,0 +1,149 @@ +#ifndef CFFI_MESSAGEBOX +# ifdef _MSC_VER +# define CFFI_MESSAGEBOX 1 +# else +# define CFFI_MESSAGEBOX 0 +# endif +#endif + + +#if CFFI_MESSAGEBOX +/* Windows only: logic to take the Python-CFFI embedding logic + initialization errors and display them in a background thread + with MessageBox. The idea is that if the whole program closes + as a result of this problem, then likely it is already a console + program and you can read the stderr output in the console too. + If it is not a console program, then it will likely show its own + dialog to complain, or generally not abruptly close, and for this + case the background thread should stay alive. +*/ +static void *volatile _cffi_bootstrap_text; + +static PyObject *_cffi_start_error_capture(void) +{ + PyObject *result = NULL; + PyObject *x, *m, *bi; + + if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text, + (void *)1, NULL) != NULL) + return (PyObject *)1; + + m = PyImport_AddModule("_cffi_error_capture"); + if (m == NULL) + goto error; + + result = PyModule_GetDict(m); + if (result == NULL) + goto error; + +#if PY_MAJOR_VERSION >= 3 + bi = PyImport_ImportModule("builtins"); +#else + bi = PyImport_ImportModule("__builtin__"); +#endif + if (bi == NULL) + goto error; + PyDict_SetItemString(result, "__builtins__", bi); + Py_DECREF(bi); + + x = PyRun_String( + "import sys\n" + "class FileLike:\n" + " def write(self, x):\n" + " try:\n" + " of.write(x)\n" + " except: pass\n" + " self.buf += x\n" + " def flush(self):\n" + " pass\n" + "fl = FileLike()\n" + "fl.buf = ''\n" + "of = sys.stderr\n" + "sys.stderr = fl\n" + "def done():\n" + " sys.stderr = of\n" + " return fl.buf\n", /* make sure the returned value stays alive */ + Py_file_input, + result, result); + Py_XDECREF(x); + + error: + if (PyErr_Occurred()) + { + PyErr_WriteUnraisable(Py_None); + PyErr_Clear(); + } + return result; +} + +#pragma comment(lib, "user32.lib") + +static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored) +{ + Sleep(666); /* may be interrupted if the whole process is closing */ +#if PY_MAJOR_VERSION >= 3 + MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text, + L"Python-CFFI error", + MB_OK | MB_ICONERROR); +#else + MessageBoxA(NULL, (char *)_cffi_bootstrap_text, + "Python-CFFI error", + MB_OK | MB_ICONERROR); +#endif + _cffi_bootstrap_text = NULL; + return 0; +} + +static void _cffi_stop_error_capture(PyObject *ecap) +{ + PyObject *s; + void *text; + + if (ecap == (PyObject *)1) + return; + + if (ecap == NULL) + goto error; + + s = PyRun_String("done()", Py_eval_input, ecap, ecap); + if (s == NULL) + goto error; + + /* Show a dialog box, but in a background thread, and + never show multiple dialog boxes at once. */ +#if PY_MAJOR_VERSION >= 3 + text = PyUnicode_AsWideCharString(s, NULL); +#else + text = PyString_AsString(s); +#endif + + _cffi_bootstrap_text = text; + + if (text != NULL) + { + HANDLE h; + h = CreateThread(NULL, 0, _cffi_bootstrap_dialog, + NULL, 0, NULL); + if (h != NULL) + CloseHandle(h); + } + /* decref the string, but it should stay alive as 'fl.buf' + in the small module above. It will really be freed only if + we later get another similar error. So it's a leak of at + most one copy of the small module. That's fine for this + situation which is usually a "fatal error" anyway. 
*/
+    Py_DECREF(s);
+    PyErr_Clear();
+    return;
+
+  error:
+    _cffi_bootstrap_text = NULL;
+    PyErr_Clear();
+}
+
+#else
+
+static PyObject *_cffi_start_error_capture(void) { return NULL; }
+static void _cffi_stop_error_capture(PyObject *ecap) { }
+
+#endif
diff --git a/venv/lib/python3.12/site-packages/cffi/_cffi_include.h b/venv/lib/python3.12/site-packages/cffi/_cffi_include.h
new file mode 100644
index 00000000..908a1d73
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/cffi/_cffi_include.h
@@ -0,0 +1,389 @@
+#define _CFFI_
+
+/* We try to define Py_LIMITED_API before including Python.h.
+
+   Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and
+   Py_REF_DEBUG are not defined.  This is a best-effort approximation:
+   we can learn about Py_DEBUG from pyconfig.h, but it is unclear if
+   the same works for the other two macros.  Py_DEBUG implies them,
+   but not the other way around.
+
+   The implementation is messy (issue #350): on Windows, with _MSC_VER,
+   we have to define Py_LIMITED_API even before including pyconfig.h.
+   In that case, we guess what pyconfig.h will do to the macros above,
+   and check our guess after the #include.
+
+   Note that on Windows, with CPython 3.x, you need >= 3.5 and virtualenv
+   version >= 16.0.0.  With older versions of either, you don't get a
+   copy of PYTHON3.DLL in the virtualenv.  We can't check the version of
+   CPython *before* we even include pyconfig.h.  ffi.set_source() puts
+   a ``#define _CFFI_NO_LIMITED_API'' at the start of this file if it is
+   running on Windows < 3.5, as an attempt at fixing it, but that's
+   arguably wrong because it may not be the target version of Python.
+   Still better than nothing I guess.  As another workaround, you can
+   remove the definition of Py_LIMITED_API here.
+
+   See also 'py_limited_api' in cffi/setuptools_ext.py.
+*/
+#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API)
+#  ifdef _MSC_VER
+#    if !defined(_DEBUG) && !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API)
+#      define Py_LIMITED_API
+#    endif
+#    include <pyconfig.h>
+     /* sanity-check: Py_LIMITED_API will cause crashes if any of these
+        are also defined.  Normally, the Python file PC/pyconfig.h does not
+        cause any of these to be defined, with the exception that _DEBUG
+        causes Py_DEBUG.  Double-check that.
*/
+#    ifdef Py_LIMITED_API
+#      if defined(Py_DEBUG)
+#        error "pyconfig.h unexpectedly defines Py_DEBUG, but Py_LIMITED_API is set"
+#      endif
+#      if defined(Py_TRACE_REFS)
+#        error "pyconfig.h unexpectedly defines Py_TRACE_REFS, but Py_LIMITED_API is set"
+#      endif
+#      if defined(Py_REF_DEBUG)
+#        error "pyconfig.h unexpectedly defines Py_REF_DEBUG, but Py_LIMITED_API is set"
+#      endif
+#    endif
+#  else
+#    include <pyconfig.h>
+#    if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API)
+#      define Py_LIMITED_API
+#    endif
+#  endif
+#endif
+
+#include <Python.h>
+#ifdef __cplusplus
+extern "C" {
+#endif
+#include <stddef.h>
+#include "parse_c_type.h"
+
+/* this block of #ifs should be kept exactly identical between
+   c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
+   and cffi/_cffi_include.h */
+#if defined(_MSC_VER)
+# include <malloc.h>   /* for alloca() */
+# if _MSC_VER < 1600   /* MSVC < 2010 */
+   typedef __int8 int8_t;
+   typedef __int16 int16_t;
+   typedef __int32 int32_t;
+   typedef __int64 int64_t;
+   typedef unsigned __int8 uint8_t;
+   typedef unsigned __int16 uint16_t;
+   typedef unsigned __int32 uint32_t;
+   typedef unsigned __int64 uint64_t;
+   typedef __int8 int_least8_t;
+   typedef __int16 int_least16_t;
+   typedef __int32 int_least32_t;
+   typedef __int64 int_least64_t;
+   typedef unsigned __int8 uint_least8_t;
+   typedef unsigned __int16 uint_least16_t;
+   typedef unsigned __int32 uint_least32_t;
+   typedef unsigned __int64 uint_least64_t;
+   typedef __int8 int_fast8_t;
+   typedef __int16 int_fast16_t;
+   typedef __int32 int_fast32_t;
+   typedef __int64 int_fast64_t;
+   typedef unsigned __int8 uint_fast8_t;
+   typedef unsigned __int16 uint_fast16_t;
+   typedef unsigned __int32 uint_fast32_t;
+   typedef unsigned __int64 uint_fast64_t;
+   typedef __int64 intmax_t;
+   typedef unsigned __int64 uintmax_t;
+# else
+#  include <stdint.h>
+# endif
+# if _MSC_VER < 1800   /* MSVC < 2013 */
+#  ifndef __cplusplus
+    typedef unsigned char _Bool;
+#  endif
+# endif
+# define _cffi_float_complex_t   _Fcomplex    /* include <complex.h> for it */
+# define _cffi_double_complex_t  _Dcomplex    /* include <complex.h> for it */
+#else
+# include <stdint.h>
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
+#  include <alloca.h>
+# endif
+# define _cffi_float_complex_t   float _Complex
+# define _cffi_double_complex_t  double _Complex
+#endif
+
+#ifdef __GNUC__
+# define _CFFI_UNUSED_FN  __attribute__((unused))
+#else
+# define _CFFI_UNUSED_FN  /* nothing */
+#endif
+
+#ifdef __cplusplus
+# ifndef _Bool
+   typedef bool _Bool;   /* semi-hackish: C++ has no _Bool; bool is builtin */
+# endif
+#endif
+
+/**********  CPython-specific section  **********/
+#ifndef PYPY_VERSION
+
+
+#if PY_MAJOR_VERSION >= 3
+# define PyInt_FromLong PyLong_FromLong
+#endif
+
+#define _cffi_from_c_double PyFloat_FromDouble
+#define _cffi_from_c_float PyFloat_FromDouble
+#define _cffi_from_c_long PyInt_FromLong
+#define _cffi_from_c_ulong PyLong_FromUnsignedLong
+#define _cffi_from_c_longlong PyLong_FromLongLong
+#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
+#define _cffi_from_c__Bool PyBool_FromLong
+
+#define _cffi_to_c_double PyFloat_AsDouble
+#define _cffi_to_c_float PyFloat_AsDouble
+
+#define _cffi_from_c_int(x, type)                                        \
+    (((type)-1) > 0 ? /* unsigned */                                     \
+        (sizeof(type) < sizeof(long) ?                                   \
+            PyInt_FromLong((long)x) :                                    \
+         sizeof(type) == sizeof(long) ?                                  \
+            PyLong_FromUnsignedLong((unsigned long)x) :                  \
+            PyLong_FromUnsignedLongLong((unsigned long long)x)) :        \
+        (sizeof(type) <= sizeof(long) ?
\ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + not used any more +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(struct _cffi_ctypedescr *, \ + PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24]) +#define _CFFI_CPIDX 25 +#define _cffi_call_python \ + ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX]) +#define _cffi_to_c_wchar3216_t \ + ((int(*)(PyObject *))_cffi_exports[26]) +#define _cffi_from_c_wchar3216_t \ + ((PyObject *(*)(int))_cffi_exports[27]) +#define _CFFI_NUM_EXPORTS 28 + +struct _cffi_ctypedescr; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; + +#define _cffi_type(index) ( \ + assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \ + (struct _cffi_ctypedescr *)_cffi_types[index]) + +static PyObject *_cffi_init(const char *module_name, Py_ssize_t version, + const struct _cffi_type_context_s *ctx) +{ + PyObject *module, *o_arg, *new_module; + void *raw[] = { + (void *)module_name, + (void *)version, + (void *)_cffi_exports, + (void *)ctx, + }; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + o_arg = PyLong_FromVoidPtr((void *)raw); + if (o_arg == NULL) + goto failure; + + new_module = 
PyObject_CallMethod( + module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg); + + Py_DECREF(o_arg); + Py_DECREF(module); + return new_module; + + failure: + Py_XDECREF(module); + return NULL; +} + + +#ifdef HAVE_WCHAR_H +typedef wchar_t _cffi_wchar_t; +#else +typedef uint16_t _cffi_wchar_t; /* same random pick as _cffi_backend.c */ +#endif + +_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o) +{ + if (sizeof(_cffi_wchar_t) == 2) + return (uint16_t)_cffi_to_c_wchar_t(o); + else + return (uint16_t)_cffi_to_c_wchar3216_t(o); +} + +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x) +{ + if (sizeof(_cffi_wchar_t) == 2) + return _cffi_from_c_wchar_t((_cffi_wchar_t)x); + else + return _cffi_from_c_wchar3216_t((int)x); +} + +_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o) +{ + if (sizeof(_cffi_wchar_t) == 4) + return (int)_cffi_to_c_wchar_t(o); + else + return (int)_cffi_to_c_wchar3216_t(o); +} + +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(unsigned int x) +{ + if (sizeof(_cffi_wchar_t) == 4) + return _cffi_from_c_wchar_t((_cffi_wchar_t)x); + else + return _cffi_from_c_wchar3216_t((int)x); +} + +union _cffi_union_alignment_u { + unsigned char m_char; + unsigned short m_short; + unsigned int m_int; + unsigned long m_long; + unsigned long long m_longlong; + float m_float; + double m_double; + long double m_longdouble; +}; + +struct _cffi_freeme_s { + struct _cffi_freeme_s *next; + union _cffi_union_alignment_u alignment; +}; + +_CFFI_UNUSED_FN static int +_cffi_convert_array_argument(struct _cffi_ctypedescr *ctptr, PyObject *arg, + char **output_data, Py_ssize_t datasize, + struct _cffi_freeme_s **freeme) +{ + char *p; + if (datasize < 0) + return -1; + + p = *output_data; + if (p == NULL) { + struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( + offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); + if (fp == NULL) + return -1; + fp->next = *freeme; + *freeme = fp; + p = *output_data = (char *)&fp->alignment; + } + memset((void *)p, 0, (size_t)datasize); + return _cffi_convert_array_from_object(p, ctptr, arg); +} + +_CFFI_UNUSED_FN static void +_cffi_free_array_arguments(struct _cffi_freeme_s *freeme) +{ + do { + void *p = (void *)freeme; + freeme = freeme->next; + PyObject_Free(p); + } while (freeme != NULL); +} + +/********** end CPython-specific section **********/ +#else +_CFFI_UNUSED_FN +static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *); +# define _cffi_call_python _cffi_call_python_org +#endif + + +#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0])) + +#define _cffi_prim_int(size, sign) \ + ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \ + (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \ + (size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \ + (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \ + _CFFI__UNKNOWN_PRIM) + +#define _cffi_prim_float(size) \ + ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \ + (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \ + (size) == sizeof(long double) ? 
_CFFI__UNKNOWN_LONG_DOUBLE :       \
+     _CFFI__UNKNOWN_FLOAT_PRIM)
+
+#define _cffi_check_int(got, got_nonpos, expected)      \
+    ((got_nonpos) == (expected <= 0) &&                 \
+     (got) == (unsigned long long)expected)
+
+#ifdef MS_WIN32
+# define _cffi_stdcall  __stdcall
+#else
+# define _cffi_stdcall  /* nothing */
+#endif
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/venv/lib/python3.12/site-packages/cffi/_embedding.h b/venv/lib/python3.12/site-packages/cffi/_embedding.h
new file mode 100644
index 00000000..64c04f67
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/cffi/_embedding.h
@@ -0,0 +1,550 @@
+
+/***** Support code for embedding *****/
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+#if defined(_WIN32)
+# define CFFI_DLLEXPORT  __declspec(dllexport)
+#elif defined(__GNUC__)
+# define CFFI_DLLEXPORT  __attribute__((visibility("default")))
+#else
+# define CFFI_DLLEXPORT  /* nothing */
+#endif
+
+
+/* There are two global variables of type _cffi_call_python_fnptr:
+
+   * _cffi_call_python, which we declare just below, is the one called
+     by ``extern "Python"`` implementations.
+
+   * _cffi_call_python_org, which on CPython is actually part of the
+     _cffi_exports[] array, is the function pointer copied from
+     _cffi_backend.  If _cffi_start_python() fails, then this is set
+     to NULL; otherwise, it should never be NULL.
+
+   After initialization is complete, both are equal.  However, the
+   first one remains equal to &_cffi_start_and_call_python until the
+   very end of initialization, when we are (or should be) sure that
+   concurrent threads also see a completely initialized world, and
+   only then is it changed.
+*/
+#undef _cffi_call_python
+typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *);
+static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *);
+static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python;
+
+
+#ifndef _MSC_VER
+   /* --- Assuming a GCC not infinitely old --- */
+# define cffi_compare_and_swap(l,o,n)  __sync_bool_compare_and_swap(l,o,n)
+# define cffi_write_barrier()          __sync_synchronize()
+# if !defined(__amd64__) && !defined(__x86_64__) &&   \
+     !defined(__i386__) && !defined(__i386)
+#  define cffi_read_barrier()          __sync_synchronize()
+# else
+#  define cffi_read_barrier()          (void)0
+# endif
+#else
+   /* --- Windows threads version --- */
+# include <windows.h>
+# define cffi_compare_and_swap(l,o,n)                          \
+                        (InterlockedCompareExchangePointer(l,n,o) == (o))
+# define cffi_write_barrier()       InterlockedCompareExchange(&_cffi_dummy,0,0)
+# define cffi_read_barrier()        (void)0
+static volatile LONG _cffi_dummy;
+#endif
+
+#ifdef WITH_THREAD
+# ifndef _MSC_VER
+#  include <pthread.h>
+   static pthread_mutex_t _cffi_embed_startup_lock;
+# else
+   static CRITICAL_SECTION _cffi_embed_startup_lock;
+# endif
+  static char _cffi_embed_startup_lock_ready = 0;
+#endif
+
+static void _cffi_acquire_reentrant_mutex(void)
+{
+    static void *volatile lock = NULL;
+
+    while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) {
+        /* should ideally do a spin loop instruction here, but
+           hard to do it portably and doesn't really matter I
+           think: pthread_mutex_init() should be very fast, and
+           this is only run at start-up anyway.
*/
+    }
+
+#ifdef WITH_THREAD
+    if (!_cffi_embed_startup_lock_ready) {
+# ifndef _MSC_VER
+        pthread_mutexattr_t attr;
+        pthread_mutexattr_init(&attr);
+        pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE);
+        pthread_mutex_init(&_cffi_embed_startup_lock, &attr);
+# else
+        InitializeCriticalSection(&_cffi_embed_startup_lock);
+# endif
+        _cffi_embed_startup_lock_ready = 1;
+    }
+#endif
+
+    while (!cffi_compare_and_swap(&lock, (void *)1, NULL))
+        ;
+
+#ifndef _MSC_VER
+    pthread_mutex_lock(&_cffi_embed_startup_lock);
+#else
+    EnterCriticalSection(&_cffi_embed_startup_lock);
+#endif
+}
+
+static void _cffi_release_reentrant_mutex(void)
+{
+#ifndef _MSC_VER
+    pthread_mutex_unlock(&_cffi_embed_startup_lock);
+#else
+    LeaveCriticalSection(&_cffi_embed_startup_lock);
+#endif
+}
+
+
+/**********  CPython-specific section  **********/
+#ifndef PYPY_VERSION
+
+#include "_cffi_errors.h"
+
+
+#define _cffi_call_python_org  _cffi_exports[_CFFI_CPIDX]
+
+PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void);   /* forward */
+
+static void _cffi_py_initialize(void)
+{
+    /* XXX use initsigs=0, which "skips initialization registration of
+       signal handlers, which might be useful when Python is
+       embedded" according to the Python docs.  But review and think
+       if it should be a user-controllable setting.
+
+       XXX we should also give a way to write errors to a buffer
+       instead of to stderr.
+
+       XXX if importing 'site' fails, CPython (any version) calls
+       exit().  Should we try to work around this behavior here?
+    */
+    Py_InitializeEx(0);
+}
+
+static int _cffi_initialize_python(void)
+{
+    /* This initializes Python, imports _cffi_backend, and then the
+       present .dll/.so is set up as a CPython C extension module.
+    */
+    int result;
+    PyGILState_STATE state;
+    PyObject *pycode=NULL, *global_dict=NULL, *x;
+    PyObject *builtins;
+
+    state = PyGILState_Ensure();
+
+    /* Call the initxxx() function from the present module.  It will
+       create and initialize us as a CPython extension module, instead
+       of letting the startup Python code do it---it might reimport
+       the same .dll/.so and get maybe confused on some platforms.
+       It might also have troubles locating the .dll/.so again for all
+       I know.
+    */
+    (void)_CFFI_PYTHON_STARTUP_FUNC();
+    if (PyErr_Occurred())
+        goto error;
+
+    /* Now run the Python code provided to ffi.embedding_init_code().
+    */
+    pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE,
+                              "<init code for '" _CFFI_MODULE_NAME "'>",
+                              Py_file_input);
+    if (pycode == NULL)
+        goto error;
+    global_dict = PyDict_New();
+    if (global_dict == NULL)
+        goto error;
+    builtins = PyEval_GetBuiltins();
+    if (builtins == NULL)
+        goto error;
+    if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0)
+        goto error;
+    x = PyEval_EvalCode(
+#if PY_MAJOR_VERSION < 3
+                        (PyCodeObject *)
+#endif
+                        pycode, global_dict, global_dict);
+    if (x == NULL)
+        goto error;
+    Py_DECREF(x);
+
+    /* Done!  Now if we've been called from
+       _cffi_start_and_call_python() in an ``extern "Python"``, we can
+       only hope that the Python code did correctly set up the
+       corresponding @ffi.def_extern() function.  Otherwise, the
+       general logic of ``extern "Python"`` functions (inside the
+       _cffi_backend module) will find that the reference is still
+       missing and print an error.
+    */
+    result = 0;
+ done:
+    Py_XDECREF(pycode);
+    Py_XDECREF(global_dict);
+    PyGILState_Release(state);
+    return result;
+
+ error:;
+    {
+        /* Print as much information as potentially useful.
+ Debugging load-time failures with embedding is not fun + */ + PyObject *ecap; + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + ecap = _cffi_start_error_capture(); + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString( + "Failed to initialize the Python-CFFI embedding logic:\n\n", f); + } + + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 2.0.0" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + _cffi_stop_error_capture(ecap); + } + result = -1; + goto done; +} + +#if PY_VERSION_HEX < 0x03080000 +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ +#endif + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + (What it really does used to be completely different in Python 2 + and Python 3, with the Python 2 solution avoiding the spin-lock + around the Py_InitializeEx() call. However, after recent changes + to CPython 2.7 (issue #358) it no longer works. So we use the + Python 3 solution everywhere.) + + This initializes Python by calling Py_InitializeEx(). + Important: this must not be called concurrently at all. + So we use a global variable as a simple spin lock. This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. + + In Python < 3.8, we choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + In Python >= 3.8, this string array is no longer writable, so + instead we pick PyCapsuleType.tp_version_tag. We can't change + Python < 3.8 because someone might use a mixture of cffi + embedded modules, some of which were compiled before this file + changed. + + In Python >= 3.12, this stopped working because that particular + tp_version_tag gets modified during interpreter startup. It's + arguably a bad idea before 3.12 too, but again we can't change + that because someone might use a mixture of cffi embedded + modules, and no-one reported a bug so far. In Python >= 3.12 + we go instead for PyCapsuleType.tp_as_buffer, which is supposed + to always be NULL. We write to it temporarily a pointer to + a struct full of NULLs, which is semantically the same. 
+ */ + +#ifdef WITH_THREAD +# if PY_VERSION_HEX < 0x03080000 + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value, *locked_value; + + while (1) { /* spin loop */ + old_value = *lock; + locked_value = old_value + 1; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. */ + } + } +# else +# if PY_VERSION_HEX < 0x030C0000 + int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag; + int old_value, locked_value = -42; + assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG)); +# else + static struct ebp_s { PyBufferProcs buf; int mark; } empty_buffer_procs; + empty_buffer_procs.mark = -42; + PyBufferProcs *volatile *lock = (PyBufferProcs *volatile *) + &PyCapsule_Type.tp_as_buffer; + PyBufferProcs *old_value, *locked_value = &empty_buffer_procs.buf; +# endif + + while (1) { /* spin loop */ + old_value = *lock; + if (old_value == 0) { + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { +# if PY_VERSION_HEX < 0x030C0000 + assert(old_value == locked_value); +# else + /* The pointer should point to a possibly different + empty_buffer_procs from another C extension module */ + assert(((struct ebp_s *)old_value)->mark == -42); +# endif + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. */ + } + } +# endif +#endif + + /* call Py_InitializeEx() */ + if (!Py_IsInitialized()) { + _cffi_py_initialize(); +#if PY_VERSION_HEX < 0x03070000 + PyEval_InitThreads(); +#endif + PyEval_SaveThread(); /* release the GIL */ + /* the returned tstate must be the one that has been stored into the + autoTLSkey by _PyGILState_Init() called from Py_Initialize(). */ + } + else { +#if PY_VERSION_HEX < 0x03070000 + /* PyEval_InitThreads() is always a no-op from CPython 3.7 */ + PyGILState_STATE state = PyGILState_Ensure(); + PyEval_InitThreads(); + PyGILState_Release(state); +#endif + } + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, locked_value, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void *func; /* function pointer */ + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. 
when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. 
It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. */ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier + +#ifdef __cplusplus +} +#endif diff --git a/venv/lib/python3.12/site-packages/cffi/_imp_emulation.py b/venv/lib/python3.12/site-packages/cffi/_imp_emulation.py new file mode 100644 index 00000000..136abddd --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/_imp_emulation.py @@ -0,0 +1,83 @@ + +try: + # this works on Python < 3.12 + from imp import * + +except ImportError: + # this is a limited emulation for Python >= 3.12. + # Note that this is used only for tests or for the old ffi.verify(). + # This is copied from the source code of Python 3.11. + + from _imp import (acquire_lock, release_lock, + is_builtin, is_frozen) + + from importlib._bootstrap import _load + + from importlib import machinery + import os + import sys + import tokenize + + SEARCH_ERROR = 0 + PY_SOURCE = 1 + PY_COMPILED = 2 + C_EXTENSION = 3 + PY_RESOURCE = 4 + PKG_DIRECTORY = 5 + C_BUILTIN = 6 + PY_FROZEN = 7 + PY_CODERESOURCE = 8 + IMP_HOOK = 9 + + def get_suffixes(): + extensions = [(s, 'rb', C_EXTENSION) + for s in machinery.EXTENSION_SUFFIXES] + source = [(s, 'r', PY_SOURCE) for s in machinery.SOURCE_SUFFIXES] + bytecode = [(s, 'rb', PY_COMPILED) for s in machinery.BYTECODE_SUFFIXES] + return extensions + source + bytecode + + def find_module(name, path=None): + if not isinstance(name, str): + raise TypeError("'name' must be a str, not {}".format(type(name))) + elif not isinstance(path, (type(None), list)): + # Backwards-compatibility + raise RuntimeError("'path' must be None or a list, " + "not {}".format(type(path))) + + if path is None: + if is_builtin(name): + return None, None, ('', '', C_BUILTIN) + elif is_frozen(name): + return None, None, ('', '', PY_FROZEN) + else: + path = sys.path + + for entry in path: + package_directory = os.path.join(entry, name) + for suffix in ['.py', machinery.BYTECODE_SUFFIXES[0]]: + package_file_name = '__init__' + suffix + file_path = os.path.join(package_directory, package_file_name) + if os.path.isfile(file_path): + return None, package_directory, ('', '', PKG_DIRECTORY) + for suffix, mode, type_ in get_suffixes(): + file_name = name + suffix + file_path = os.path.join(entry, file_name) + if os.path.isfile(file_path): + break + else: + continue + break # Break out of outer loop when breaking out of inner loop. 
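
The find_module() emulation above leans on Python's for/else: the else clause attached to the outer for (it continues on the next diff line) runs only when the loop finishes without hitting break. A minimal standalone sketch of the idiom, with illustrative names that are not part of this diff:

    candidates = ["foo.txt", "bar.py"]        # hypothetical search entries
    for entry in candidates:
        if entry.endswith(".py"):
            break                             # found: the else is skipped
    else:
        raise ImportError("no .py entry")     # runs only if no break fired
    print("picked", entry)
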
+ else: + raise ImportError(name, name=name) + + encoding = None + if 'b' not in mode: + with open(file_path, 'rb') as file: + encoding = tokenize.detect_encoding(file.readline)[0] + file = open(file_path, mode, encoding=encoding) + return file, file_path, (suffix, mode, type_) + + def load_dynamic(name, path, file=None): + loader = machinery.ExtensionFileLoader(name, path) + spec = machinery.ModuleSpec(name=name, loader=loader, origin=path) + return _load(spec) diff --git a/venv/lib/python3.12/site-packages/cffi/_shimmed_dist_utils.py b/venv/lib/python3.12/site-packages/cffi/_shimmed_dist_utils.py new file mode 100644 index 00000000..c3d23128 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/_shimmed_dist_utils.py @@ -0,0 +1,45 @@ +""" +Temporary shim module to indirect the bits of distutils we need from setuptools/distutils while providing useful +error messages beyond `No module named 'distutils' on Python >= 3.12, or when setuptools' vendored distutils is broken. + +This is a compromise to avoid a hard-dep on setuptools for Python >= 3.12, since many users don't need runtime compilation support from CFFI. +""" +import sys + +try: + # import setuptools first; this is the most robust way to ensure its embedded distutils is available + # (the .pth shim should usually work, but this is even more robust) + import setuptools +except Exception as ex: + if sys.version_info >= (3, 12): + # Python 3.12 has no built-in distutils to fall back on, so any import problem is fatal + raise Exception("This CFFI feature requires setuptools on Python >= 3.12. The setuptools module is missing or non-functional.") from ex + + # silently ignore on older Pythons (support fallback to stdlib distutils where available) +else: + del setuptools + +try: + # bring in just the bits of distutils we need, whether they really came from setuptools or stdlib-embedded distutils + from distutils import log, sysconfig + from distutils.ccompiler import CCompiler + from distutils.command.build_ext import build_ext + from distutils.core import Distribution, Extension + from distutils.dir_util import mkpath + from distutils.errors import DistutilsSetupError, CompileError, LinkError + from distutils.log import set_threshold, set_verbosity + + if sys.platform == 'win32': + try: + # FUTURE: msvc9compiler module was removed in setuptools 74; consider removing, as it's only used by an ancient patch in `recompiler` + from distutils.msvc9compiler import MSVCCompiler + except ImportError: + MSVCCompiler = None +except Exception as ex: + if sys.version_info >= (3, 12): + raise Exception("This CFFI feature requires setuptools on Python >= 3.12. Please install the setuptools package.") from ex + + # anything older, just let the underlying distutils import error fly + raise Exception("This CFFI feature requires distutils. Please install the distutils or setuptools package.") from ex + +del sys diff --git a/venv/lib/python3.12/site-packages/cffi/api.py b/venv/lib/python3.12/site-packages/cffi/api.py new file mode 100644 index 00000000..5a474f3d --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/api.py @@ -0,0 +1,967 @@ +import sys, types +from .lock import allocate_lock +from .error import CDefError +from . 
import model + +try: + callable +except NameError: + # Python 3.1 + from collections import Callable + callable = lambda x: isinstance(x, Callable) + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +_unspecified = object() + + + +class FFI(object): + r''' + The main top-level class that you instantiate once, or once per module. + + Example usage: + + ffi = FFI() + ffi.cdef(""" + int printf(const char *, ...); + """) + + C = ffi.dlopen(None) # standard library + -or- + C = ffi.verify() # use a C compiler: verify the decl above is right + + C.printf("hello, %s!\n", ffi.new("char[]", "world")) + ''' + + def __init__(self, backend=None): + """Create an FFI instance. The 'backend' argument is used to + select a non-default backend, mostly for tests. + """ + if backend is None: + # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with + # _cffi_backend.so compiled. + import _cffi_backend as backend + from . import __version__ + if backend.__version__ != __version__: + # bad version! Try to be as explicit as possible. + if hasattr(backend, '__file__'): + # CPython + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % ( + __version__, __file__, + backend.__version__, backend.__file__)) + else: + # PyPy + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % ( + __version__, __file__, backend.__version__)) + # (If you insist you can also try to pass the option + # 'backend=backend_ctypes.CTypesBackend()', but don't + # rely on it! It's probably not going to work well.) + + from . import cparser + self._backend = backend + self._lock = allocate_lock() + self._parser = cparser.Parser() + self._cached_btypes = {} + self._parsed_types = types.ModuleType('parsed_types').__dict__ + self._new_types = types.ModuleType('new_types').__dict__ + self._function_caches = [] + self._libraries = [] + self._cdefsources = [] + self._included_ffis = [] + self._windows_unicode = None + self._init_once_cache = {} + self._cdef_version = None + self._embedding = None + self._typecache = model.get_typecache(backend) + if hasattr(backend, 'set_ffi'): + backend.set_ffi(self) + for name in list(backend.__dict__): + if name.startswith('RTLD_'): + setattr(self, name, getattr(backend, name)) + # + with self._lock: + self.BVoidP = self._get_cached_btype(model.voidp_type) + self.BCharA = self._get_cached_btype(model.char_array_type) + if isinstance(backend, types.ModuleType): + # _cffi_backend: attach these constants to the class + if not hasattr(FFI, 'NULL'): + FFI.NULL = self.cast(self.BVoidP, 0) + FFI.CData, FFI.CType = backend._get_types() + else: + # ctypes backend: attach these constants to the instance + self.NULL = self.cast(self.BVoidP, 0) + self.CData, self.CType = backend._get_types() + self.buffer = backend.buffer + + def cdef(self, csource, override=False, packed=False, pack=None): + """Parse the given C source. This registers all declared functions, + types, and global variables. The functions and global variables can + then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'. + The types can be used in 'ffi.new()' and other functions. 
+ If 'packed' is specified as True, all structs declared inside this + cdef are packed, i.e. laid out without any field alignment at all. + Alternatively, 'pack' can be a small integer, and requests for + alignment greater than that are ignored (pack=1 is equivalent to + packed=True). + """ + self._cdef(csource, override=override, packed=packed, pack=pack) + + def embedding_api(self, csource, packed=False, pack=None): + self._cdef(csource, packed=packed, pack=pack, dllexport=True) + if self._embedding is None: + self._embedding = '' + + def _cdef(self, csource, override=False, **options): + if not isinstance(csource, str): # unicode, on Python 2 + if not isinstance(csource, basestring): + raise TypeError("cdef() argument must be a string") + csource = csource.encode('ascii') + with self._lock: + self._cdef_version = object() + self._parser.parse(csource, override=override, **options) + self._cdefsources.append(csource) + if override: + for cache in self._function_caches: + cache.clear() + finishlist = self._parser._recomplete + if finishlist: + self._parser._recomplete = [] + for tp in finishlist: + tp.finish_backend_type(self, finishlist) + + def dlopen(self, name, flags=0): + """Load and return a dynamic library identified by 'name'. + The standard C library can be loaded by passing None. + Note that functions and types declared by 'ffi.cdef()' are not + linked to a particular library, just like C headers; in the + library we only look for the actual (untyped) symbols. + """ + if not (isinstance(name, basestring) or + name is None or + isinstance(name, self.CData)): + raise TypeError("dlopen(name): name must be a file name, None, " + "or an already-opened 'void *' handle") + with self._lock: + lib, function_cache = _make_ffi_library(self, name, flags) + self._function_caches.append(function_cache) + self._libraries.append(lib) + return lib + + def dlclose(self, lib): + """Close a library obtained with ffi.dlopen(). After this call, + access to functions or variables from the library will fail + (possibly with a segmentation fault). + """ + type(lib).__cffi_close__(lib) + + def _typeof_locked(self, cdecl): + # call me with the lock! + key = cdecl + if key in self._parsed_types: + return self._parsed_types[key] + # + if not isinstance(cdecl, str): # unicode, on Python 2 + cdecl = cdecl.encode('ascii') + # + type = self._parser.parse_type(cdecl) + really_a_function_type = type.is_raw_function + if really_a_function_type: + type = type.as_function_pointer() + btype = self._get_cached_btype(type) + result = btype, really_a_function_type + self._parsed_types[key] = result + return result + + def _typeof(self, cdecl, consider_function_as_funcptr=False): + # string -> ctype object + try: + result = self._parsed_types[cdecl] + except KeyError: + with self._lock: + result = self._typeof_locked(cdecl) + # + btype, really_a_function_type = result + if really_a_function_type and not consider_function_as_funcptr: + raise CDefError("the type %r is a function type, not a " + "pointer-to-function type" % (cdecl,)) + return btype + + def typeof(self, cdecl): + """Parse the C type given as a string and return the + corresponding object. + It can also be used on 'cdata' instance to get its C type. 
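
A short usage sketch of typeof(), added here for illustration only (it is not part of the vendored file, and assumes the default _cffi_backend is importable):

    from cffi import FFI

    ffi = FFI()
    t = ffi.typeof("int[5]")                      # parse a C type string
    p = ffi.new("int *")
    assert ffi.typeof(p) is ffi.typeof("int *")   # ctype objects are cached
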
+ """ + if isinstance(cdecl, basestring): + return self._typeof(cdecl) + if isinstance(cdecl, self.CData): + return self._backend.typeof(cdecl) + if isinstance(cdecl, types.BuiltinFunctionType): + res = _builtin_function_type(cdecl) + if res is not None: + return res + if (isinstance(cdecl, types.FunctionType) + and hasattr(cdecl, '_cffi_base_type')): + with self._lock: + return self._get_cached_btype(cdecl._cffi_base_type) + raise TypeError(type(cdecl)) + + def sizeof(self, cdecl): + """Return the size in bytes of the argument. It can be a + string naming a C type, or a 'cdata' instance. + """ + if isinstance(cdecl, basestring): + BType = self._typeof(cdecl) + return self._backend.sizeof(BType) + else: + return self._backend.sizeof(cdecl) + + def alignof(self, cdecl): + """Return the natural alignment size in bytes of the C type + given as a string. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.alignof(cdecl) + + def offsetof(self, cdecl, *fields_or_indexes): + """Return the offset of the named field inside the given + structure or array, which must be given as a C type name. + You can give several field names in case of nested structures. + You can also give numeric values which correspond to array + items, in case of an array type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._typeoffsetof(cdecl, *fields_or_indexes)[1] + + def new(self, cdecl, init=None): + """Allocate an instance according to the specified C type and + return a pointer to it. The specified C type must be either a + pointer or an array: ``new('X *')`` allocates an X and returns + a pointer to it, whereas ``new('X[n]')`` allocates an array of + n X'es and returns an array referencing it (which works + mostly like a pointer, like in C). You can also use + ``new('X[]', n)`` to allocate an array of a non-constant + length n. + + The memory is initialized following the rules of declaring a + global variable in C: by default it is zero-initialized, but + an explicit initializer can be given which can be used to + fill all or part of the memory. + + When the returned object goes out of scope, the memory + is freed. In other words the returned object has + ownership of the value of type 'cdecl' that it points to. This + means that the raw data can be used as long as this object is + kept alive, but must not be used for a longer time. Be careful + about that when copying the pointer to the memory somewhere + else, e.g. into another structure. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.newp(cdecl, init) + + def new_allocator(self, alloc=None, free=None, + should_clear_after_alloc=True): + """Return a new allocator, i.e. a function that behaves like ffi.new() + but uses the provided low-level 'alloc' and 'free' functions. + + 'alloc' is called with the size as argument. If it returns NULL, a + MemoryError is raised. 'free' is called with the result of 'alloc' + as argument. Both can be either Python function or directly C + functions. If 'free' is None, then no free function is called. + If both 'alloc' and 'free' are None, the default is used. + + If 'should_clear_after_alloc' is set to False, then the memory + returned by 'alloc' is assumed to be already cleared (or you are + fine with garbage); otherwise CFFI will clear it. 
+ """ + compiled_ffi = self._backend.FFI() + allocator = compiled_ffi.new_allocator(alloc, free, + should_clear_after_alloc) + def allocate(cdecl, init=None): + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return allocator(cdecl, init) + return allocate + + def cast(self, cdecl, source): + """Similar to a C cast: returns an instance of the named C + type initialized with the given 'source'. The source is + casted between integers or pointers of any type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.cast(cdecl, source) + + def string(self, cdata, maxlen=-1): + """Return a Python string (or unicode string) from the 'cdata'. + If 'cdata' is a pointer or array of characters or bytes, returns + the null-terminated string. The returned string extends until + the first null character, or at most 'maxlen' characters. If + 'cdata' is an array then 'maxlen' defaults to its length. + + If 'cdata' is a pointer or array of wchar_t, returns a unicode + string following the same rules. + + If 'cdata' is a single character or byte or a wchar_t, returns + it as a string or unicode string. + + If 'cdata' is an enum, returns the value of the enumerator as a + string, or 'NUMBER' if the value is out of range. + """ + return self._backend.string(cdata, maxlen) + + def unpack(self, cdata, length): + """Unpack an array of C data of the given length, + returning a Python string/unicode/list. + + If 'cdata' is a pointer to 'char', returns a byte string. + It does not stop at the first null. This is equivalent to: + ffi.buffer(cdata, length)[:] + + If 'cdata' is a pointer to 'wchar_t', returns a unicode string. + 'length' is measured in wchar_t's; it is not the size in bytes. + + If 'cdata' is a pointer to anything else, returns a list of + 'length' items. This is a faster equivalent to: + [cdata[i] for i in range(length)] + """ + return self._backend.unpack(cdata, length) + + #def buffer(self, cdata, size=-1): + # """Return a read-write buffer object that references the raw C data + # pointed to by the given 'cdata'. The 'cdata' must be a pointer or + # an array. Can be passed to functions expecting a buffer, or directly + # manipulated with: + # + # buf[:] get a copy of it in a regular string, or + # buf[idx] as a single character + # buf[:] = ... + # buf[idx] = ... change the content + # """ + # note that 'buffer' is a type, set on this instance by __init__ + + def from_buffer(self, cdecl, python_buffer=_unspecified, + require_writable=False): + """Return a cdata of the given type pointing to the data of the + given Python object, which must support the buffer interface. + Note that this is not meant to be used on the built-in types + str or unicode (you can build 'char[]' arrays explicitly) + but only on objects containing large quantities of raw data + in some other format, like 'array.array' or numpy arrays. + + The first argument is optional and default to 'char[]'. + """ + if python_buffer is _unspecified: + cdecl, python_buffer = self.BCharA, cdecl + elif isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.from_buffer(cdecl, python_buffer, + require_writable) + + def memmove(self, dest, src, n): + """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest. + + Like the C function memmove(), the memory areas may overlap; + apart from that it behaves like the C function memcpy(). + + 'src' can be any cdata ptr or array, or any Python buffer object. 
+ 'dest' can be any cdata ptr or array, or a writable Python buffer + object. The size to copy, 'n', is always measured in bytes. + + Unlike other methods, this one supports all Python buffer including + byte strings and bytearrays---but it still does not support + non-contiguous buffers. + """ + return self._backend.memmove(dest, src, n) + + def callback(self, cdecl, python_callable=None, error=None, onerror=None): + """Return a callback object or a decorator making such a + callback object. 'cdecl' must name a C function pointer type. + The callback invokes the specified 'python_callable' (which may + be provided either directly or via a decorator). Important: the + callback object must be manually kept alive for as long as the + callback may be invoked from the C level. + """ + def callback_decorator_wrap(python_callable): + if not callable(python_callable): + raise TypeError("the 'python_callable' argument " + "is not callable") + return self._backend.callback(cdecl, python_callable, + error, onerror) + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl, consider_function_as_funcptr=True) + if python_callable is None: + return callback_decorator_wrap # decorator mode + else: + return callback_decorator_wrap(python_callable) # direct mode + + def getctype(self, cdecl, replace_with=''): + """Return a string giving the C type 'cdecl', which may be itself + a string or a object. If 'replace_with' is given, it gives + extra text to append (or insert for more complicated C types), like + a variable name, or '*' to get actually the C type 'pointer-to-cdecl'. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + replace_with = replace_with.strip() + if (replace_with.startswith('*') + and '&[' in self._backend.getcname(cdecl, '&')): + replace_with = '(%s)' % replace_with + elif replace_with and not replace_with[0] in '[(': + replace_with = ' ' + replace_with + return self._backend.getcname(cdecl, replace_with) + + def gc(self, cdata, destructor, size=0): + """Return a new cdata object that points to the same + data. Later, when this new cdata object is garbage-collected, + 'destructor(old_cdata_object)' will be called. + + The optional 'size' gives an estimate of the size, used to + trigger the garbage collection more eagerly. So far only used + on PyPy. It tells the GC that the returned object keeps alive + roughly 'size' bytes of external memory. + """ + return self._backend.gcp(cdata, destructor, size) + + def _get_cached_btype(self, type): + assert self._lock.acquire(False) is False + # call me with the lock! + try: + BType = self._cached_btypes[type] + except KeyError: + finishlist = [] + BType = type.get_cached_btype(self, finishlist) + for type in finishlist: + type.finish_backend_type(self, finishlist) + return BType + + def verify(self, source='', tmpdir=None, **kwargs): + """Verify that the current ffi signatures compile on this + machine, and return a dynamic library object. The dynamic + library can be used to call functions and access global + variables declared in this 'ffi'. The library is compiled + by the C compiler: it gives you C-level API compatibility + (including calling macros). This is unlike 'ffi.dlopen()', + which requires binary compatibility in the signatures. 
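
A classic usage sketch of this older verify() API (editorial, not part of the vendored file); it needs a C compiler at run time, and libraries=['m'] is an assumption that applies on most Unix systems:

    from cffi import FFI

    ffi = FFI()
    ffi.cdef("double sin(double x);")
    lib = ffi.verify("#include <math.h>", libraries=["m"])
    assert lib.sin(0.0) == 0.0
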
+ """ + from .verifier import Verifier, _caller_dir_pycache + # + # If set_unicode(True) was called, insert the UNICODE and + # _UNICODE macro declarations + if self._windows_unicode: + self._apply_windows_unicode(kwargs) + # + # Set the tmpdir here, and not in Verifier.__init__: it picks + # up the caller's directory, which we want to be the caller of + # ffi.verify(), as opposed to the caller of Veritier(). + tmpdir = tmpdir or _caller_dir_pycache() + # + # Make a Verifier() and use it to load the library. + self.verifier = Verifier(self, source, tmpdir, **kwargs) + lib = self.verifier.load_library() + # + # Save the loaded library for keep-alive purposes, even + # if the caller doesn't keep it alive itself (it should). + self._libraries.append(lib) + return lib + + def _get_errno(self): + return self._backend.get_errno() + def _set_errno(self, errno): + self._backend.set_errno(errno) + errno = property(_get_errno, _set_errno, None, + "the value of 'errno' from/to the C calls") + + def getwinerror(self, code=-1): + return self._backend.getwinerror(code) + + def _pointer_to(self, ctype): + with self._lock: + return model.pointer_cache(self, ctype) + + def addressof(self, cdata, *fields_or_indexes): + """Return the address of a . + If 'fields_or_indexes' are given, returns the address of that + field or array item in the structure or array, recursively in + case of nested structures. + """ + try: + ctype = self._backend.typeof(cdata) + except TypeError: + if '__addressof__' in type(cdata).__dict__: + return type(cdata).__addressof__(cdata, *fields_or_indexes) + raise + if fields_or_indexes: + ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes) + else: + if ctype.kind == "pointer": + raise TypeError("addressof(pointer)") + offset = 0 + ctypeptr = self._pointer_to(ctype) + return self._backend.rawaddressof(ctypeptr, cdata, offset) + + def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes): + ctype, offset = self._backend.typeoffsetof(ctype, field_or_index) + for field1 in fields_or_indexes: + ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1) + offset += offset1 + return ctype, offset + + def include(self, ffi_to_include): + """Includes the typedefs, structs, unions and enums defined + in another FFI instance. Usage is similar to a #include in C, + where a part of the program might include types defined in + another part for its own usage. Note that the include() + method has no effect on functions, constants and global + variables, which must anyway be accessed directly from the + lib object returned by the original FFI instance. + """ + if not isinstance(ffi_to_include, FFI): + raise TypeError("ffi.include() expects an argument that is also of" + " type cffi.FFI, not %r" % ( + type(ffi_to_include).__name__,)) + if ffi_to_include is self: + raise ValueError("self.include(self)") + with ffi_to_include._lock: + with self._lock: + self._parser.include(ffi_to_include._parser) + self._cdefsources.append('[') + self._cdefsources.extend(ffi_to_include._cdefsources) + self._cdefsources.append(']') + self._included_ffis.append(ffi_to_include) + + def new_handle(self, x): + return self._backend.newp_handle(self.BVoidP, x) + + def from_handle(self, x): + return self._backend.from_handle(x) + + def release(self, x): + self._backend.release(x) + + def set_unicode(self, enabled_flag): + """Windows: if 'enabled_flag' is True, enable the UNICODE and + _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR + to be (pointers to) wchar_t. 
If 'enabled_flag' is False, + declare these types to be (pointers to) plain 8-bit characters. + This is mostly for backward compatibility; you usually want True. + """ + if self._windows_unicode is not None: + raise ValueError("set_unicode() can only be called once") + enabled_flag = bool(enabled_flag) + if enabled_flag: + self.cdef("typedef wchar_t TBYTE;" + "typedef wchar_t TCHAR;" + "typedef const wchar_t *LPCTSTR;" + "typedef const wchar_t *PCTSTR;" + "typedef wchar_t *LPTSTR;" + "typedef wchar_t *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + else: + self.cdef("typedef char TBYTE;" + "typedef char TCHAR;" + "typedef const char *LPCTSTR;" + "typedef const char *PCTSTR;" + "typedef char *LPTSTR;" + "typedef char *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + self._windows_unicode = enabled_flag + + def _apply_windows_unicode(self, kwds): + defmacros = kwds.get('define_macros', ()) + if not isinstance(defmacros, (list, tuple)): + raise TypeError("'define_macros' must be a list or tuple") + defmacros = list(defmacros) + [('UNICODE', '1'), + ('_UNICODE', '1')] + kwds['define_macros'] = defmacros + + def _apply_embedding_fix(self, kwds): + # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # + if '__pypy__' in sys.builtin_module_names: + import os + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. + pythonlib = "python{0[0]}{0[1]}".format(sys.version_info) + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' for installed + # systems. + if sys.version_info < (3,): + pythonlib = "pypy-c" + else: + pythonlib = "pypy3-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + # On uninstalled pypy's, the libpypy-c is typically found in + # .../pypy/goal/. + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) + else: + if sys.platform == "win32": + template = "python%d%d" + if hasattr(sys, 'gettotalrefcount'): + template += '_d' + else: + try: + import sysconfig + except ImportError: # 2.6 + from cffi._shimmed_dist_utils import sysconfig + template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + if hasattr(sys, 'abiflags'): + pythonlib += sys.abiflags + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') + + def set_source(self, module_name, source, source_extension='.c', **kwds): + import os + if hasattr(self, '_assigned_source'): + raise ValueError("set_source() cannot be called several times " + "per ffi object") + if not isinstance(module_name, basestring): + raise TypeError("'module_name' must be a string") + if os.sep in module_name or (os.altsep and os.altsep in module_name): + raise ValueError("'module_name' must not contain '/': use a dotted " + "name to make a 'package.module' location") + self._assigned_source = (str(module_name), source, + source_extension, kwds) + + def set_source_pkgconfig(self, module_name, pkgconfig_libs, source, + source_extension='.c', **kwds): + from . 
import pkgconfig + if not isinstance(pkgconfig_libs, list): + raise TypeError("the pkgconfig_libs argument must be a list " + "of package names") + kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs) + pkgconfig.merge_flags(kwds, kwds2) + self.set_source(module_name, source, source_extension, **kwds) + + def distutils_extension(self, tmpdir='build', verbose=True): + from cffi._shimmed_dist_utils import mkpath + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored + return self.verifier.get_extension() + raise ValueError("set_source() must be called before" + " distutils_extension()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("distutils_extension() is only for C extension " + "modules, not for dlopen()-style pure Python " + "modules") + mkpath(tmpdir) + ext, updated = recompile(self, module_name, + source, tmpdir=tmpdir, extradir=tmpdir, + source_extension=source_extension, + call_c_compiler=False, **kwds) + if verbose: + if updated: + sys.stderr.write("regenerated: %r\n" % (ext.sources[0],)) + else: + sys.stderr.write("not modified: %r\n" % (ext.sources[0],)) + return ext + + def emit_c_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("emit_c_code() is only for C extension modules, " + "not for dlopen()-style pure Python modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, + uses_ffiplatform=False, **kwds) + + def emit_python_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is not None: + raise TypeError("emit_python_code() is only for dlopen()-style " + "pure Python modules, not for C extension modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, + uses_ffiplatform=False, **kwds) + + def compile(self, tmpdir='.', verbose=0, target=None, debug=None): + """The 'target' argument gives the final file name of the + compiled DLL. Use '*' to force distutils' choice, suitable for + regular CPython C API modules. Use a file name ending in '.*' + to ask for the system's default extension for dynamic libraries + (.so/.dll/.dylib). + + The default is '*' when building a non-embedded C API extension, + and (module_name + '.*') when building an embedded library. + """ + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before compile()") + module_name, source, source_extension, kwds = self._assigned_source + return recompile(self, module_name, source, tmpdir=tmpdir, + target=target, source_extension=source_extension, + compiler_verbose=verbose, debug=debug, **kwds) + + def init_once(self, func, tag): + # Read _init_once_cache[tag], which is either (False, lock) if + # we're calling the function now in some thread, or (True, result). + # Don't call setdefault() in most cases, to avoid allocating and + # immediately freeing a lock; but still use setdefaut() to avoid + # races. 
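
The contract implemented by the code that follows, as a usage sketch (editorial; 'connect' and the "db" tag are illustrative, not from this diff):

    from cffi import FFI

    ffi = FFI()

    def connect():
        return object()                  # stands in for expensive setup

    db1 = ffi.init_once(connect, "db")   # first call runs connect()
    db2 = ffi.init_once(connect, "db")   # cached: connect() is not re-run
    assert db1 is db2
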
+ try: + x = self._init_once_cache[tag] + except KeyError: + x = self._init_once_cache.setdefault(tag, (False, allocate_lock())) + # Common case: we got (True, result), so we return the result. + if x[0]: + return x[1] + # Else, it's a lock. Acquire it to serialize the following tests. + with x[1]: + # Read again from _init_once_cache the current status. + x = self._init_once_cache[tag] + if x[0]: + return x[1] + # Call the function and store the result back. + result = func() + self._init_once_cache[tag] = (True, result) + return result + + def embedding_init_code(self, pysource): + if self._embedding: + raise ValueError("embedding_init_code() can only be called once") + # fix 'pysource' before it gets dumped into the C file: + # - remove empty lines at the beginning, so it starts at "line 1" + # - dedent, if all non-empty lines are indented + # - check for SyntaxErrors + import re + match = re.match(r'\s*\n', pysource) + if match: + pysource = pysource[match.end():] + lines = pysource.splitlines() or [''] + prefix = re.match(r'\s*', lines[0]).group() + for i in range(1, len(lines)): + line = lines[i] + if line.rstrip(): + while not line.startswith(prefix): + prefix = prefix[:-1] + i = len(prefix) + lines = [line[i:]+'\n' for line in lines] + pysource = ''.join(lines) + # + compile(pysource, "cffi_init", "exec") + # + self._embedding = pysource + + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + + def list_types(self): + """Returns the user type names known to this FFI instance. + This returns a tuple containing three lists of names: + (typedef_names, names_of_structs, names_of_unions) + """ + typedefs = [] + structs = [] + unions = [] + for key in self._parser._declarations: + if key.startswith('typedef '): + typedefs.append(key[8:]) + elif key.startswith('struct '): + structs.append(key[7:]) + elif key.startswith('union '): + unions.append(key[6:]) + typedefs.sort() + structs.sort() + unions.sort() + return (typedefs, structs, unions) + + +def _load_backend_lib(backend, name, flags): + import os + if not isinstance(name, basestring): + if sys.platform != "win32" or name is not None: + return backend.load_library(name, flags) + name = "c" # Windows: load_library(None) fails, but this works + # on Python 2 (backward compatibility hack only) + first_error = None + if '.' in name or '/' in name or os.sep in name: + try: + return backend.load_library(name, flags) + except OSError as e: + first_error = e + import ctypes.util + path = ctypes.util.find_library(name) + if path is None: + if name == "c" and sys.platform == "win32" and sys.version_info >= (3,): + raise OSError("dlopen(None) cannot work on Windows for Python 3 " + "(see http://bugs.python.org/issue23606)") + msg = ("ctypes.util.find_library() did not manage " + "to locate a library called %r" % (name,)) + if first_error is not None: + msg = "%s. 
Additionally, %s" % (first_error, msg) + raise OSError(msg) + return backend.load_library(path, flags) + +def _make_ffi_library(ffi, libname, flags): + backend = ffi._backend + backendlib = _load_backend_lib(backend, libname, flags) + # + def accessor_function(name): + key = 'function ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + value = backendlib.load_function(BType, name) + library.__dict__[name] = value + # + def accessor_variable(name): + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + read_variable = backendlib.read_variable + write_variable = backendlib.write_variable + setattr(FFILibrary, name, property( + lambda self: read_variable(BType, name), + lambda self, value: write_variable(BType, name, value))) + # + def addressof_var(name): + try: + return addr_variables[name] + except KeyError: + with ffi._lock: + if name not in addr_variables: + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + if BType.kind != 'array': + BType = model.pointer_cache(ffi, BType) + p = backendlib.load_function(BType, name) + addr_variables[name] = p + return addr_variables[name] + # + def accessor_constant(name): + raise NotImplementedError("non-integer constant '%s' cannot be " + "accessed from a dlopen() library" % (name,)) + # + def accessor_int_constant(name): + library.__dict__[name] = ffi._parser._int_constants[name] + # + accessors = {} + accessors_version = [False] + addr_variables = {} + # + def update_accessors(): + if accessors_version[0] is ffi._cdef_version: + return + # + for key, (tp, _) in ffi._parser._declarations.items(): + if not isinstance(tp, model.EnumType): + tag, name = key.split(' ', 1) + if tag == 'function': + accessors[name] = accessor_function + elif tag == 'variable': + accessors[name] = accessor_variable + elif tag == 'constant': + accessors[name] = accessor_constant + else: + for i, enumname in enumerate(tp.enumerators): + def accessor_enum(name, tp=tp, i=i): + tp.check_not_partial() + library.__dict__[name] = tp.enumvalues[i] + accessors[enumname] = accessor_enum + for name in ffi._parser._int_constants: + accessors.setdefault(name, accessor_int_constant) + accessors_version[0] = ffi._cdef_version + # + def make_accessor(name): + with ffi._lock: + if name in library.__dict__ or name in FFILibrary.__dict__: + return # added by another thread while waiting for the lock + if name not in accessors: + update_accessors() + if name not in accessors: + raise AttributeError(name) + accessors[name](name) + # + class FFILibrary(object): + def __getattr__(self, name): + make_accessor(name) + return getattr(self, name) + def __setattr__(self, name, value): + try: + property = getattr(self.__class__, name) + except AttributeError: + make_accessor(name) + setattr(self, name, value) + else: + property.__set__(self, value) + def __dir__(self): + with ffi._lock: + update_accessors() + return accessors.keys() + def __addressof__(self, name): + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + make_accessor(name) + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + raise AttributeError("cffi library has no function or " + "global variable named '%s'" % (name,)) + def __cffi_close__(self): + backendlib.close_lib() + self.__dict__.clear() + # + if isinstance(libname, basestring): + try: + if not 
isinstance(libname, str): # unicode, on Python 2
+                libname = libname.encode('utf-8')
+            FFILibrary.__name__ = 'FFILibrary_%s' % libname
+        except UnicodeError:
+            pass
+    library = FFILibrary()
+    return library, library.__dict__
+
+def _builtin_function_type(func):
+    # a hack to make at least ffi.typeof(builtin_function) work,
+    # if the builtin function was obtained by 'vengine_cpy'.
+    import sys
+    try:
+        module = sys.modules[func.__module__]
+        ffi = module._cffi_original_ffi
+        types_of_builtin_funcs = module._cffi_types_of_builtin_funcs
+        tp = types_of_builtin_funcs[func]
+    except (KeyError, AttributeError, TypeError):
+        return None
+    else:
+        with ffi._lock:
+            return ffi._get_cached_btype(tp)
diff --git a/venv/lib/python3.12/site-packages/cffi/backend_ctypes.py b/venv/lib/python3.12/site-packages/cffi/backend_ctypes.py
new file mode 100644
index 00000000..e7956a79
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/cffi/backend_ctypes.py
@@ -0,0 +1,1121 @@
+import ctypes, ctypes.util, operator, sys
+from . import model
+
+if sys.version_info < (3,):
+    bytechr = chr
+else:
+    unicode = str
+    long = int
+    xrange = range
+    bytechr = lambda num: bytes([num])
+
+class CTypesType(type):
+    pass
+
+class CTypesData(object):
+    __metaclass__ = CTypesType
+    __slots__ = ['__weakref__']
+    __name__ = '<cdata>'
+
+    def __init__(self, *args):
+        raise TypeError("cannot instantiate %r" % (self.__class__,))
+
+    @classmethod
+    def _newp(cls, init):
+        raise TypeError("expected a pointer or array ctype, got '%s'"
+                        % (cls._get_c_name(),))
+
+    @staticmethod
+    def _to_ctypes(value):
+        raise TypeError
+
+    @classmethod
+    def _arg_to_ctypes(cls, *value):
+        try:
+            ctype = cls._ctype
+        except AttributeError:
+            raise TypeError("cannot create an instance of %r" % (cls,))
+        if value:
+            res = cls._to_ctypes(*value)
+            if not isinstance(res, ctype):
+                res = cls._ctype(res)
+        else:
+            res = cls._ctype()
+        return res
+
+    @classmethod
+    def _create_ctype_obj(cls, init):
+        if init is None:
+            return cls._arg_to_ctypes()
+        else:
+            return cls._arg_to_ctypes(init)
+
+    @staticmethod
+    def _from_ctypes(ctypes_value):
+        raise TypeError
+
+    @classmethod
+    def _get_c_name(cls, replace_with=''):
+        return cls._reftypename.replace(' &', replace_with)
+
+    @classmethod
+    def _fix_class(cls):
+        cls.__name__ = 'CData<%s>' % (cls._get_c_name(),)
+        cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),)
+        cls.__module__ = 'ffi'
+
+    def _get_own_repr(self):
+        raise NotImplementedError
+
+    def _addr_repr(self, address):
+        if address == 0:
+            return 'NULL'
+        else:
+            if address < 0:
+                address += 1 << (8*ctypes.sizeof(ctypes.c_void_p))
+            return '0x%x' % address
+
+    def __repr__(self, c_name=None):
+        own = self._get_own_repr()
+        return '<cdata %r %s>' % (c_name or self._get_c_name(), own)
+
+    def _convert_to_address(self, BClass):
+        if BClass is None:
+            raise TypeError("cannot convert %r to an address" % (
+                self._get_c_name(),))
+        else:
+            raise TypeError("cannot convert %r to %r" % (
+                self._get_c_name(), BClass._get_c_name()))
+
+    @classmethod
+    def _get_size(cls):
+        return ctypes.sizeof(cls._ctype)
+
+    def _get_size_of_instance(self):
+        return ctypes.sizeof(self._ctype)
+
+    @classmethod
+    def _cast_from(cls, source):
+        raise TypeError("cannot cast to %r" % (cls._get_c_name(),))
+
+    def _cast_to_integer(self):
+        return self._convert_to_address(None)
+
+    @classmethod
+    def _alignment(cls):
+        return ctypes.alignment(cls._ctype)
+
+    def __iter__(self):
+        raise TypeError("cdata %r does not support iteration" % (
+            self._get_c_name()),)
+
+    def _make_cmp(name):
+        cmpfunc = 
getattr(operator, name) + def cmp(self, other): + v_is_ptr = not isinstance(self, CTypesGenericPrimitive) + w_is_ptr = (isinstance(other, CTypesData) and + not isinstance(other, CTypesGenericPrimitive)) + if v_is_ptr and w_is_ptr: + return cmpfunc(self._convert_to_address(None), + other._convert_to_address(None)) + elif v_is_ptr or w_is_ptr: + return NotImplemented + else: + if isinstance(self, CTypesGenericPrimitive): + self = self._value + if isinstance(other, CTypesGenericPrimitive): + other = other._value + return cmpfunc(self, other) + cmp.func_name = name + return cmp + + __eq__ = _make_cmp('__eq__') + __ne__ = _make_cmp('__ne__') + __lt__ = _make_cmp('__lt__') + __le__ = _make_cmp('__le__') + __gt__ = _make_cmp('__gt__') + __ge__ = _make_cmp('__ge__') + + def __hash__(self): + return hash(self._convert_to_address(None)) + + def _to_string(self, maxlen): + raise TypeError("string(): %r" % (self,)) + + +class CTypesGenericPrimitive(CTypesData): + __slots__ = [] + + def __hash__(self): + return hash(self._value) + + def _get_own_repr(self): + return repr(self._from_ctypes(self._value)) + + +class CTypesGenericArray(CTypesData): + __slots__ = [] + + @classmethod + def _newp(cls, init): + return cls(init) + + def __iter__(self): + for i in xrange(len(self)): + yield self[i] + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + +class CTypesGenericPtr(CTypesData): + __slots__ = ['_address', '_as_ctype_ptr'] + _automatic_casts = False + kind = "pointer" + + @classmethod + def _newp(cls, init): + return cls(init) + + @classmethod + def _cast_from(cls, source): + if source is None: + address = 0 + elif isinstance(source, CTypesData): + address = source._cast_to_integer() + elif isinstance(source, (int, long)): + address = source + else: + raise TypeError("bad type for cast to %r: %r" % + (cls, type(source).__name__)) + return cls._new_pointer_at(address) + + @classmethod + def _new_pointer_at(cls, address): + self = cls.__new__(cls) + self._address = address + self._as_ctype_ptr = ctypes.cast(address, cls._ctype) + return self + + def _get_own_repr(self): + try: + return self._addr_repr(self._address) + except AttributeError: + return '???' 
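
How this pointer class surfaces through the public API when the ctypes backend is selected, as a sketch under the assumption that FFI(backend=CTypesBackend()) is used (a backend cffi itself treats as a test-only fallback):

    from cffi import FFI
    from cffi.backend_ctypes import CTypesBackend

    ffi = FFI(backend=CTypesBackend())
    p = ffi.cast("int *", 0)    # exercises _cast_from()/_new_pointer_at()
    assert not bool(p)          # __bool__ below: a NULL address is falsy
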
+ + def _cast_to_integer(self): + return self._address + + def __nonzero__(self): + return bool(self._address) + __bool__ = __nonzero__ + + @classmethod + def _to_ctypes(cls, value): + if not isinstance(value, CTypesData): + raise TypeError("unexpected %s object" % type(value).__name__) + address = value._convert_to_address(cls) + return ctypes.cast(address, cls._ctype) + + @classmethod + def _from_ctypes(cls, ctypes_ptr): + address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0 + return cls._new_pointer_at(address) + + @classmethod + def _initialize(cls, ctypes_ptr, value): + if value: + ctypes_ptr.contents = cls._to_ctypes(value).contents + + def _convert_to_address(self, BClass): + if (BClass in (self.__class__, None) or BClass._automatic_casts + or self._automatic_casts): + return self._address + else: + return CTypesData._convert_to_address(self, BClass) + + +class CTypesBaseStructOrUnion(CTypesData): + __slots__ = ['_blob'] + + @classmethod + def _create_ctype_obj(cls, init): + # may be overridden + raise TypeError("cannot instantiate opaque type %s" % (cls,)) + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + @classmethod + def _offsetof(cls, fieldname): + return getattr(cls._ctype, fieldname).offset + + def _convert_to_address(self, BClass): + if getattr(BClass, '_BItem', None) is self.__class__: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @classmethod + def _from_ctypes(cls, ctypes_struct_or_union): + self = cls.__new__(cls) + self._blob = ctypes_struct_or_union + return self + + @classmethod + def _to_ctypes(cls, value): + return value._blob + + def __repr__(self, c_name=None): + return CTypesData.__repr__(self, c_name or self._get_c_name(' &')) + + +class CTypesBackend(object): + + PRIMITIVE_TYPES = { + 'char': ctypes.c_char, + 'short': ctypes.c_short, + 'int': ctypes.c_int, + 'long': ctypes.c_long, + 'long long': ctypes.c_longlong, + 'signed char': ctypes.c_byte, + 'unsigned char': ctypes.c_ubyte, + 'unsigned short': ctypes.c_ushort, + 'unsigned int': ctypes.c_uint, + 'unsigned long': ctypes.c_ulong, + 'unsigned long long': ctypes.c_ulonglong, + 'float': ctypes.c_float, + 'double': ctypes.c_double, + '_Bool': ctypes.c_bool, + } + + for _name in ['unsigned long long', 'unsigned long', + 'unsigned int', 'unsigned short', 'unsigned char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name] + + for _name in ['long long', 'long', 'int', 'short', 'signed char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name] + PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name] + + + def __init__(self): + self.RTLD_LAZY = 0 # not supported anyway by ctypes + self.RTLD_NOW = 0 + self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL + self.RTLD_LOCAL = ctypes.RTLD_LOCAL + + def set_ffi(self, ffi): + self.ffi = ffi + + def _get_types(self): + return CTypesData, CTypesType + + def load_library(self, path, flags=0): + cdll = ctypes.CDLL(path, flags) + return CTypesLibrary(self, cdll) + + def 
new_void_type(self): + class CTypesVoid(CTypesData): + __slots__ = [] + _reftypename = 'void &' + @staticmethod + def _from_ctypes(novalue): + return None + @staticmethod + def _to_ctypes(novalue): + if novalue is not None: + raise TypeError("None expected, got %s object" % + (type(novalue).__name__,)) + return None + CTypesVoid._fix_class() + return CTypesVoid + + def new_primitive_type(self, name): + if name == 'wchar_t': + raise NotImplementedError(name) + ctype = self.PRIMITIVE_TYPES[name] + if name == 'char': + kind = 'char' + elif name in ('float', 'double'): + kind = 'float' + else: + if name in ('signed char', 'unsigned char'): + kind = 'byte' + elif name == '_Bool': + kind = 'bool' + else: + kind = 'int' + is_signed = (ctype(-1).value == -1) + # + def _cast_source_to_int(source): + if isinstance(source, (int, long, float)): + source = int(source) + elif isinstance(source, CTypesData): + source = source._cast_to_integer() + elif isinstance(source, bytes): + source = ord(source) + elif source is None: + source = 0 + else: + raise TypeError("bad type for cast to %r: %r" % + (CTypesPrimitive, type(source).__name__)) + return source + # + kind1 = kind + class CTypesPrimitive(CTypesGenericPrimitive): + __slots__ = ['_value'] + _ctype = ctype + _reftypename = '%s &' % name + kind = kind1 + + def __init__(self, value): + self._value = value + + @staticmethod + def _create_ctype_obj(init): + if init is None: + return ctype() + return ctype(CTypesPrimitive._to_ctypes(init)) + + if kind == 'int' or kind == 'byte': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = ctype(source).value # cast within range + return cls(source) + def __int__(self): + return self._value + + if kind == 'bool': + @classmethod + def _cast_from(cls, source): + if not isinstance(source, (int, long, float)): + source = _cast_source_to_int(source) + return cls(bool(source)) + def __int__(self): + return int(self._value) + + if kind == 'char': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = bytechr(source & 0xFF) + return cls(source) + def __int__(self): + return ord(self._value) + + if kind == 'float': + @classmethod + def _cast_from(cls, source): + if isinstance(source, float): + pass + elif isinstance(source, CTypesGenericPrimitive): + if hasattr(source, '__float__'): + source = float(source) + else: + source = int(source) + else: + source = _cast_source_to_int(source) + source = ctype(source).value # fix precision + return cls(source) + def __int__(self): + return int(self._value) + def __float__(self): + return self._value + + _cast_to_integer = __int__ + + if kind == 'int' or kind == 'byte' or kind == 'bool': + @staticmethod + def _to_ctypes(x): + if not isinstance(x, (int, long)): + if isinstance(x, CTypesData): + x = int(x) + else: + raise TypeError("integer expected, got %s" % + type(x).__name__) + if ctype(x).value != x: + if not is_signed and x < 0: + raise OverflowError("%s: negative integer" % name) + else: + raise OverflowError("%s: integer out of bounds" + % name) + return x + + if kind == 'char': + @staticmethod + def _to_ctypes(x): + if isinstance(x, bytes) and len(x) == 1: + return x + if isinstance(x, CTypesPrimitive): # > + return x._value + raise TypeError("character expected, got %s" % + type(x).__name__) + def __nonzero__(self): + return ord(self._value) != 0 + else: + def __nonzero__(self): + return self._value != 0 + __bool__ = __nonzero__ + + if kind == 'float': + @staticmethod + def _to_ctypes(x): + if 
not isinstance(x, (int, long, float, CTypesData)): + raise TypeError("float expected, got %s" % + type(x).__name__) + return ctype(x).value + + @staticmethod + def _from_ctypes(value): + return getattr(value, 'value', value) + + @staticmethod + def _initialize(blob, init): + blob.value = CTypesPrimitive._to_ctypes(init) + + if kind == 'char': + def _to_string(self, maxlen): + return self._value + if kind == 'byte': + def _to_string(self, maxlen): + return chr(self._value & 0xff) + # + CTypesPrimitive._fix_class() + return CTypesPrimitive + + def new_pointer_type(self, BItem): + getbtype = self.ffi._get_cached_btype + if BItem is getbtype(model.PrimitiveType('char')): + kind = 'charp' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'bytep' + elif BItem is getbtype(model.void_type): + kind = 'voidp' + else: + kind = 'generic' + # + class CTypesPtr(CTypesGenericPtr): + __slots__ = ['_own'] + if kind == 'charp': + __slots__ += ['__as_strbuf'] + _BItem = BItem + if hasattr(BItem, '_ctype'): + _ctype = ctypes.POINTER(BItem._ctype) + _bitem_size = ctypes.sizeof(BItem._ctype) + else: + _ctype = ctypes.c_void_p + if issubclass(BItem, CTypesGenericArray): + _reftypename = BItem._get_c_name('(* &)') + else: + _reftypename = BItem._get_c_name(' * &') + + def __init__(self, init): + ctypeobj = BItem._create_ctype_obj(init) + if kind == 'charp': + self.__as_strbuf = ctypes.create_string_buffer( + ctypeobj.value + b'\x00') + self._as_ctype_ptr = ctypes.cast( + self.__as_strbuf, self._ctype) + else: + self._as_ctype_ptr = ctypes.pointer(ctypeobj) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own = True + + def __add__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address + + other * self._bitem_size) + else: + return NotImplemented + + def __sub__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address - + other * self._bitem_size) + elif type(self) is type(other): + return (self._address - other._address) // self._bitem_size + else: + return NotImplemented + + def __getitem__(self, index): + if getattr(self, '_own', False) and index != 0: + raise IndexError + return BItem._from_ctypes(self._as_ctype_ptr[index]) + + def __setitem__(self, index, value): + self._as_ctype_ptr[index] = BItem._to_ctypes(value) + + if kind == 'charp' or kind == 'voidp': + @classmethod + def _arg_to_ctypes(cls, *value): + if value and isinstance(value[0], bytes): + return ctypes.c_char_p(value[0]) + else: + return super(CTypesPtr, cls)._arg_to_ctypes(*value) + + if kind == 'charp' or kind == 'bytep': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = sys.maxsize + p = ctypes.cast(self._as_ctype_ptr, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % ( + ctypes.sizeof(self._as_ctype_ptr.contents),) + return super(CTypesPtr, self)._get_own_repr() + # + if (BItem is self.ffi._get_cached_btype(model.void_type) or + BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))): + CTypesPtr._automatic_casts = True + # + CTypesPtr._fix_class() + return CTypesPtr + + def new_array_type(self, CTypesPtr, length): + if length is None: + brackets = ' &[]' + else: + brackets = ' &[%d]' % length + BItem = CTypesPtr._BItem + getbtype = self.ffi._get_cached_btype + if 
BItem is getbtype(model.PrimitiveType('char')): + kind = 'char' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'byte' + else: + kind = 'generic' + # + class CTypesArray(CTypesGenericArray): + __slots__ = ['_blob', '_own'] + if length is not None: + _ctype = BItem._ctype * length + else: + __slots__.append('_ctype') + _reftypename = BItem._get_c_name(brackets) + _declared_length = length + _CTPtr = CTypesPtr + + def __init__(self, init): + if length is None: + if isinstance(init, (int, long)): + len1 = init + init = None + elif kind == 'char' and isinstance(init, bytes): + len1 = len(init) + 1 # extra null + else: + init = tuple(init) + len1 = len(init) + self._ctype = BItem._ctype * len1 + self._blob = self._ctype() + self._own = True + if init is not None: + self._initialize(self._blob, init) + + @staticmethod + def _initialize(blob, init): + if isinstance(init, bytes): + init = [init[i:i+1] for i in range(len(init))] + else: + if isinstance(init, CTypesGenericArray): + if (len(init) != len(blob) or + not isinstance(init, CTypesArray)): + raise TypeError("length/type mismatch: %s" % (init,)) + init = tuple(init) + if len(init) > len(blob): + raise IndexError("too many initializers") + addr = ctypes.cast(blob, ctypes.c_void_p).value + PTR = ctypes.POINTER(BItem._ctype) + itemsize = ctypes.sizeof(BItem._ctype) + for i, value in enumerate(init): + p = ctypes.cast(addr + i * itemsize, PTR) + BItem._initialize(p.contents, value) + + def __len__(self): + return len(self._blob) + + def __getitem__(self, index): + if not (0 <= index < len(self._blob)): + raise IndexError + return BItem._from_ctypes(self._blob[index]) + + def __setitem__(self, index, value): + if not (0 <= index < len(self._blob)): + raise IndexError + self._blob[index] = BItem._to_ctypes(value) + + if kind == 'char' or kind == 'byte': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = len(self._blob) + p = ctypes.cast(self._blob, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % (ctypes.sizeof(self._blob),) + return super(CTypesArray, self)._get_own_repr() + + def _convert_to_address(self, BClass): + if BClass in (CTypesPtr, None) or BClass._automatic_casts: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @staticmethod + def _from_ctypes(ctypes_array): + self = CTypesArray.__new__(CTypesArray) + self._blob = ctypes_array + return self + + @staticmethod + def _arg_to_ctypes(value): + return CTypesPtr._arg_to_ctypes(value) + + def __add__(self, other): + if isinstance(other, (int, long)): + return CTypesPtr._new_pointer_at( + ctypes.addressof(self._blob) + + other * ctypes.sizeof(BItem._ctype)) + else: + return NotImplemented + + @classmethod + def _cast_from(cls, source): + raise NotImplementedError("casting to %r" % ( + cls._get_c_name(),)) + # + CTypesArray._fix_class() + return CTypesArray + + def _new_struct_or_union(self, kind, name, base_ctypes_class): + # + class struct_or_union(base_ctypes_class): + pass + struct_or_union.__name__ = '%s_%s' % (kind, name) + kind1 = kind + # + class CTypesStructOrUnion(CTypesBaseStructOrUnion): + __slots__ = ['_blob'] + _ctype = struct_or_union + _reftypename = '%s &' % (name,) + _kind = kind = kind1 + # + CTypesStructOrUnion._fix_class() + return CTypesStructOrUnion + + def 
new_struct_type(self, name): + return self._new_struct_or_union('struct', name, ctypes.Structure) + + def new_union_type(self, name): + return self._new_struct_or_union('union', name, ctypes.Union) + + def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp, + totalsize=-1, totalalignment=-1, sflags=0, + pack=0): + if totalsize >= 0 or totalalignment >= 0: + raise NotImplementedError("the ctypes backend of CFFI does not support " + "structures completed by verify(); please " + "compile and install the _cffi_backend module.") + struct_or_union = CTypesStructOrUnion._ctype + fnames = [fname for (fname, BField, bitsize) in fields] + btypes = [BField for (fname, BField, bitsize) in fields] + bitfields = [bitsize for (fname, BField, bitsize) in fields] + # + bfield_types = {} + cfields = [] + for (fname, BField, bitsize) in fields: + if bitsize < 0: + cfields.append((fname, BField._ctype)) + bfield_types[fname] = BField + else: + cfields.append((fname, BField._ctype, bitsize)) + bfield_types[fname] = Ellipsis + if sflags & 8: + struct_or_union._pack_ = 1 + elif pack: + struct_or_union._pack_ = pack + struct_or_union._fields_ = cfields + CTypesStructOrUnion._bfield_types = bfield_types + # + @staticmethod + def _create_ctype_obj(init): + result = struct_or_union() + if init is not None: + initialize(result, init) + return result + CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj + # + def initialize(blob, init): + if is_union: + if len(init) > 1: + raise ValueError("union initializer: %d items given, but " + "only one supported (use a dict if needed)" + % (len(init),)) + if not isinstance(init, dict): + if isinstance(init, (bytes, unicode)): + raise TypeError("union initializer: got a str") + init = tuple(init) + if len(init) > len(fnames): + raise ValueError("too many values for %s initializer" % + CTypesStructOrUnion._get_c_name()) + init = dict(zip(fnames, init)) + addr = ctypes.addressof(blob) + for fname, value in init.items(): + BField, bitsize = name2fieldtype[fname] + assert bitsize < 0, \ + "not implemented: initializer with bit fields" + offset = CTypesStructOrUnion._offsetof(fname) + PTR = ctypes.POINTER(BField._ctype) + p = ctypes.cast(addr + offset, PTR) + BField._initialize(p.contents, value) + is_union = CTypesStructOrUnion._kind == 'union' + name2fieldtype = dict(zip(fnames, zip(btypes, bitfields))) + # + for fname, BField, bitsize in fields: + if fname == '': + raise NotImplementedError("nested anonymous structs/unions") + if hasattr(CTypesStructOrUnion, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + if bitsize < 0: + def getter(self, fname=fname, BField=BField, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BField._from_ctypes(p.contents) + def setter(self, value, fname=fname, BField=BField): + setattr(self._blob, fname, BField._to_ctypes(value)) + # + if issubclass(BField, CTypesGenericArray): + setter = None + if BField._declared_length == 0: + def getter(self, fname=fname, BFieldPtr=BField._CTPtr, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BFieldPtr._from_ctypes(p) + # + else: + def getter(self, fname=fname, BField=BField): + return BField._from_ctypes(getattr(self._blob, fname)) + def setter(self, value, fname=fname, BField=BField): + # xxx obscure workaround + 
value = BField._to_ctypes(value) + oldvalue = getattr(self._blob, fname) + setattr(self._blob, fname, value) + if value != getattr(self._blob, fname): + setattr(self._blob, fname, oldvalue) + raise OverflowError("value too large for bitfield") + setattr(CTypesStructOrUnion, fname, property(getter, setter)) + # + CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp)) + for fname in fnames: + if hasattr(CTypesPtr, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + def getter(self, fname=fname): + return getattr(self[0], fname) + def setter(self, value, fname=fname): + setattr(self[0], fname, value) + setattr(CTypesPtr, fname, property(getter, setter)) + + def new_function_type(self, BArgs, BResult, has_varargs): + nameargs = [BArg._get_c_name() for BArg in BArgs] + if has_varargs: + nameargs.append('...') + nameargs = ', '.join(nameargs) + # + class CTypesFunctionPtr(CTypesGenericPtr): + __slots__ = ['_own_callback', '_name'] + _ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None), + *[BArg._ctype for BArg in BArgs], + use_errno=True) + _reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,)) + + def __init__(self, init, error=None): + # create a callback to the Python callable init() + import traceback + assert not has_varargs, "varargs not supported for callbacks" + if getattr(BResult, '_ctype', None) is not None: + error = BResult._from_ctypes( + BResult._create_ctype_obj(error)) + else: + error = None + def callback(*args): + args2 = [] + for arg, BArg in zip(args, BArgs): + args2.append(BArg._from_ctypes(arg)) + try: + res2 = init(*args2) + res2 = BResult._to_ctypes(res2) + except: + traceback.print_exc() + res2 = error + if issubclass(BResult, CTypesGenericPtr): + if res2: + res2 = ctypes.cast(res2, ctypes.c_void_p).value + # .value: http://bugs.python.org/issue1574593 + else: + res2 = None + #print repr(res2) + return res2 + if issubclass(BResult, CTypesGenericPtr): + # The only pointers callbacks can return are void*s: + # http://bugs.python.org/issue5710 + callback_ctype = ctypes.CFUNCTYPE( + ctypes.c_void_p, + *[BArg._ctype for BArg in BArgs], + use_errno=True) + else: + callback_ctype = CTypesFunctionPtr._ctype + self._as_ctype_ptr = callback_ctype(callback) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own_callback = init + + @staticmethod + def _initialize(ctypes_ptr, value): + if value: + raise NotImplementedError("ctypes backend: not supported: " + "initializers for function pointers") + + def __repr__(self): + c_name = getattr(self, '_name', None) + if c_name: + i = self._reftypename.index('(* &)') + if self._reftypename[i-1] not in ' )*': + c_name = ' ' + c_name + c_name = self._reftypename.replace('(* &)', c_name) + return CTypesData.__repr__(self, c_name) + + def _get_own_repr(self): + if getattr(self, '_own_callback', None) is not None: + return 'calling %r' % (self._own_callback,) + return super(CTypesFunctionPtr, self)._get_own_repr() + + def __call__(self, *args): + if has_varargs: + assert len(args) >= len(BArgs) + extraargs = args[len(BArgs):] + args = args[:len(BArgs)] + else: + assert len(args) == len(BArgs) + ctypes_args = [] + for arg, BArg in zip(args, BArgs): + ctypes_args.append(BArg._arg_to_ctypes(arg)) + if has_varargs: + for i, arg in enumerate(extraargs): + if arg is None: + ctypes_args.append(ctypes.c_void_p(0)) # NULL + continue + if not isinstance(arg, CTypesData): + raise TypeError( + "argument %d passed in the variadic part " + "needs to be a cdata object 
(got %s)" % + (1 + len(BArgs) + i, type(arg).__name__)) + ctypes_args.append(arg._arg_to_ctypes(arg)) + result = self._as_ctype_ptr(*ctypes_args) + return BResult._from_ctypes(result) + # + CTypesFunctionPtr._fix_class() + return CTypesFunctionPtr + + def new_enum_type(self, name, enumerators, enumvalues, CTypesInt): + assert isinstance(name, str) + reverse_mapping = dict(zip(reversed(enumvalues), + reversed(enumerators))) + # + class CTypesEnum(CTypesInt): + __slots__ = [] + _reftypename = '%s &' % name + + def _get_own_repr(self): + value = self._value + try: + return '%d: %s' % (value, reverse_mapping[value]) + except KeyError: + return str(value) + + def _to_string(self, maxlen): + value = self._value + try: + return reverse_mapping[value] + except KeyError: + return str(value) + # + CTypesEnum._fix_class() + return CTypesEnum + + def get_errno(self): + return ctypes.get_errno() + + def set_errno(self, value): + ctypes.set_errno(value) + + def string(self, b, maxlen=-1): + return b._to_string(maxlen) + + def buffer(self, bptr, size=-1): + raise NotImplementedError("buffer() with ctypes backend") + + def sizeof(self, cdata_or_BType): + if isinstance(cdata_or_BType, CTypesData): + return cdata_or_BType._get_size_of_instance() + else: + assert issubclass(cdata_or_BType, CTypesData) + return cdata_or_BType._get_size() + + def alignof(self, BType): + assert issubclass(BType, CTypesData) + return BType._alignment() + + def newp(self, BType, source): + if not issubclass(BType, CTypesData): + raise TypeError + return BType._newp(source) + + def cast(self, BType, source): + return BType._cast_from(source) + + def callback(self, BType, source, error, onerror): + assert onerror is None # XXX not implemented + return BType(source, error) + + _weakref_cache_ref = None + + def gcp(self, cdata, destructor, size=0): + if self._weakref_cache_ref is None: + import weakref + class MyRef(weakref.ref): + def __eq__(self, other): + myref = self() + return self is other or ( + myref is not None and myref is other()) + def __ne__(self, other): + return not (self == other) + def __hash__(self): + try: + return self._hash + except AttributeError: + self._hash = hash(self()) + return self._hash + self._weakref_cache_ref = {}, MyRef + weak_cache, MyRef = self._weakref_cache_ref + + if destructor is None: + try: + del weak_cache[MyRef(cdata)] + except KeyError: + raise TypeError("Can remove destructor only on a object " + "previously returned by ffi.gc()") + return None + + def remove(k): + cdata, destructor = weak_cache.pop(k, (None, None)) + if destructor is not None: + destructor(cdata) + + new_cdata = self.cast(self.typeof(cdata), cdata) + assert new_cdata is not cdata + weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor) + return new_cdata + + typeof = type + + def getcname(self, BType, replace_with): + return BType._get_c_name(replace_with) + + def typeoffsetof(self, BType, fieldname, num=0): + if isinstance(fieldname, str): + if num == 0 and issubclass(BType, CTypesGenericPtr): + BType = BType._BItem + if not issubclass(BType, CTypesBaseStructOrUnion): + raise TypeError("expected a struct or union ctype") + BField = BType._bfield_types[fieldname] + if BField is Ellipsis: + raise TypeError("not supported for bitfields") + return (BField, BType._offsetof(fieldname)) + elif isinstance(fieldname, (int, long)): + if issubclass(BType, CTypesGenericArray): + BType = BType._CTPtr + if not issubclass(BType, CTypesGenericPtr): + raise TypeError("expected an array or ptr ctype") + BItem = BType._BItem + offset 
= BItem._get_size() * fieldname
+            if offset > sys.maxsize:
+                raise OverflowError
+            return (BItem, offset)
+        else:
+            raise TypeError(type(fieldname))
+
+    def rawaddressof(self, BTypePtr, cdata, offset=None):
+        if isinstance(cdata, CTypesBaseStructOrUnion):
+            ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata))
+        elif isinstance(cdata, CTypesGenericPtr):
+            if offset is None or not issubclass(type(cdata)._BItem,
+                                                CTypesBaseStructOrUnion):
+                raise TypeError("unexpected cdata type")
+            ptr = type(cdata)._to_ctypes(cdata)
+        elif isinstance(cdata, CTypesGenericArray):
+            ptr = type(cdata)._to_ctypes(cdata)
+        else:
+            raise TypeError("expected a <cdata 'struct-or-union'>")
+        if offset:
+            ptr = ctypes.cast(
+                ctypes.c_void_p(
+                    ctypes.cast(ptr, ctypes.c_void_p).value + offset),
+                type(ptr))
+        return BTypePtr._from_ctypes(ptr)
+
+
+class CTypesLibrary(object):
+
+    def __init__(self, backend, cdll):
+        self.backend = backend
+        self.cdll = cdll
+
+    def load_function(self, BType, name):
+        c_func = getattr(self.cdll, name)
+        funcobj = BType._from_ctypes(c_func)
+        funcobj._name = name
+        return funcobj
+
+    def read_variable(self, BType, name):
+        try:
+            ctypes_obj = BType._ctype.in_dll(self.cdll, name)
+        except AttributeError as e:
+            raise NotImplementedError(e)
+        return BType._from_ctypes(ctypes_obj)
+
+    def write_variable(self, BType, name, value):
+        new_ctypes_obj = BType._to_ctypes(value)
+        ctypes_obj = BType._ctype.in_dll(self.cdll, name)
+        ctypes.memmove(ctypes.addressof(ctypes_obj),
+                       ctypes.addressof(new_ctypes_obj),
+                       ctypes.sizeof(BType._ctype))
diff --git a/venv/lib/python3.12/site-packages/cffi/cffi_opcode.py b/venv/lib/python3.12/site-packages/cffi/cffi_opcode.py
new file mode 100644
index 00000000..6421df62
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/cffi/cffi_opcode.py
@@ -0,0 +1,187 @@
+from .error import VerificationError
+
+class CffiOp(object):
+    def __init__(self, op, arg):
+        self.op = op
+        self.arg = arg
+
+    def as_c_expr(self):
+        if self.op is None:
+            assert isinstance(self.arg, str)
+            return '(_cffi_opcode_t)(%s)' % (self.arg,)
+        classname = CLASS_NAME[self.op]
+        return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg)
+
+    def as_python_bytes(self):
+        if self.op is None and self.arg.isdigit():
+            value = int(self.arg)     # non-negative: '-' not in self.arg
+            if value >= 2**31:
+                raise OverflowError("cannot emit %r: limited to 2**31-1"
+                                    % (self.arg,))
+            return format_four_bytes(value)
+        if isinstance(self.arg, str):
+            raise VerificationError("cannot emit to Python: %r" % (self.arg,))
+        return format_four_bytes((self.arg << 8) | self.op)
+
+    def __str__(self):
+        classname = CLASS_NAME.get(self.op, self.op)
+        return '(%s %s)' % (classname, self.arg)
+
+def format_four_bytes(num):
+    return '\\x%02X\\x%02X\\x%02X\\x%02X' % (
+        (num >> 24) & 0xFF,
+        (num >> 16) & 0xFF,
+        (num >> 8) & 0xFF,
+        (num ) & 0xFF)
+
+OP_PRIMITIVE = 1
+OP_POINTER = 3
+OP_ARRAY = 5
+OP_OPEN_ARRAY = 7
+OP_STRUCT_UNION = 9
+OP_ENUM = 11
+OP_FUNCTION = 13
+OP_FUNCTION_END = 15
+OP_NOOP = 17
+OP_BITFIELD = 19
+OP_TYPENAME = 21
+OP_CPYTHON_BLTN_V = 23   # varargs
+OP_CPYTHON_BLTN_N = 25   # noargs
+OP_CPYTHON_BLTN_O = 27   # O  (i.e.
a single arg) +OP_CONSTANT = 29 +OP_CONSTANT_INT = 31 +OP_GLOBAL_VAR = 33 +OP_DLOPEN_FUNC = 35 +OP_DLOPEN_CONST = 37 +OP_GLOBAL_VAR_F = 39 +OP_EXTERN_PYTHON = 41 + +PRIM_VOID = 0 +PRIM_BOOL = 1 +PRIM_CHAR = 2 +PRIM_SCHAR = 3 +PRIM_UCHAR = 4 +PRIM_SHORT = 5 +PRIM_USHORT = 6 +PRIM_INT = 7 +PRIM_UINT = 8 +PRIM_LONG = 9 +PRIM_ULONG = 10 +PRIM_LONGLONG = 11 +PRIM_ULONGLONG = 12 +PRIM_FLOAT = 13 +PRIM_DOUBLE = 14 +PRIM_LONGDOUBLE = 15 + +PRIM_WCHAR = 16 +PRIM_INT8 = 17 +PRIM_UINT8 = 18 +PRIM_INT16 = 19 +PRIM_UINT16 = 20 +PRIM_INT32 = 21 +PRIM_UINT32 = 22 +PRIM_INT64 = 23 +PRIM_UINT64 = 24 +PRIM_INTPTR = 25 +PRIM_UINTPTR = 26 +PRIM_PTRDIFF = 27 +PRIM_SIZE = 28 +PRIM_SSIZE = 29 +PRIM_INT_LEAST8 = 30 +PRIM_UINT_LEAST8 = 31 +PRIM_INT_LEAST16 = 32 +PRIM_UINT_LEAST16 = 33 +PRIM_INT_LEAST32 = 34 +PRIM_UINT_LEAST32 = 35 +PRIM_INT_LEAST64 = 36 +PRIM_UINT_LEAST64 = 37 +PRIM_INT_FAST8 = 38 +PRIM_UINT_FAST8 = 39 +PRIM_INT_FAST16 = 40 +PRIM_UINT_FAST16 = 41 +PRIM_INT_FAST32 = 42 +PRIM_UINT_FAST32 = 43 +PRIM_INT_FAST64 = 44 +PRIM_UINT_FAST64 = 45 +PRIM_INTMAX = 46 +PRIM_UINTMAX = 47 +PRIM_FLOATCOMPLEX = 48 +PRIM_DOUBLECOMPLEX = 49 +PRIM_CHAR16 = 50 +PRIM_CHAR32 = 51 + +_NUM_PRIM = 52 +_UNKNOWN_PRIM = -1 +_UNKNOWN_FLOAT_PRIM = -2 +_UNKNOWN_LONG_DOUBLE = -3 + +_IO_FILE_STRUCT = -1 + +PRIMITIVE_TO_INDEX = { + 'char': PRIM_CHAR, + 'short': PRIM_SHORT, + 'int': PRIM_INT, + 'long': PRIM_LONG, + 'long long': PRIM_LONGLONG, + 'signed char': PRIM_SCHAR, + 'unsigned char': PRIM_UCHAR, + 'unsigned short': PRIM_USHORT, + 'unsigned int': PRIM_UINT, + 'unsigned long': PRIM_ULONG, + 'unsigned long long': PRIM_ULONGLONG, + 'float': PRIM_FLOAT, + 'double': PRIM_DOUBLE, + 'long double': PRIM_LONGDOUBLE, + '_cffi_float_complex_t': PRIM_FLOATCOMPLEX, + '_cffi_double_complex_t': PRIM_DOUBLECOMPLEX, + '_Bool': PRIM_BOOL, + 'wchar_t': PRIM_WCHAR, + 'char16_t': PRIM_CHAR16, + 'char32_t': PRIM_CHAR32, + 'int8_t': PRIM_INT8, + 'uint8_t': PRIM_UINT8, + 'int16_t': PRIM_INT16, + 'uint16_t': PRIM_UINT16, + 'int32_t': PRIM_INT32, + 'uint32_t': PRIM_UINT32, + 'int64_t': PRIM_INT64, + 'uint64_t': PRIM_UINT64, + 'intptr_t': PRIM_INTPTR, + 'uintptr_t': PRIM_UINTPTR, + 'ptrdiff_t': PRIM_PTRDIFF, + 'size_t': PRIM_SIZE, + 'ssize_t': PRIM_SSIZE, + 'int_least8_t': PRIM_INT_LEAST8, + 'uint_least8_t': PRIM_UINT_LEAST8, + 'int_least16_t': PRIM_INT_LEAST16, + 'uint_least16_t': PRIM_UINT_LEAST16, + 'int_least32_t': PRIM_INT_LEAST32, + 'uint_least32_t': PRIM_UINT_LEAST32, + 'int_least64_t': PRIM_INT_LEAST64, + 'uint_least64_t': PRIM_UINT_LEAST64, + 'int_fast8_t': PRIM_INT_FAST8, + 'uint_fast8_t': PRIM_UINT_FAST8, + 'int_fast16_t': PRIM_INT_FAST16, + 'uint_fast16_t': PRIM_UINT_FAST16, + 'int_fast32_t': PRIM_INT_FAST32, + 'uint_fast32_t': PRIM_UINT_FAST32, + 'int_fast64_t': PRIM_INT_FAST64, + 'uint_fast64_t': PRIM_UINT_FAST64, + 'intmax_t': PRIM_INTMAX, + 'uintmax_t': PRIM_UINTMAX, + } + +F_UNION = 0x01 +F_CHECK_FIELDS = 0x02 +F_PACKED = 0x04 +F_EXTERNAL = 0x08 +F_OPAQUE = 0x10 + +G_FLAGS = dict([('_CFFI_' + _key, globals()[_key]) + for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED', + 'F_EXTERNAL', 'F_OPAQUE']]) + +CLASS_NAME = {} +for _name, _value in list(globals().items()): + if _name.startswith('OP_') and isinstance(_value, int): + CLASS_NAME[_value] = _name[3:] diff --git a/venv/lib/python3.12/site-packages/cffi/commontypes.py b/venv/lib/python3.12/site-packages/cffi/commontypes.py new file mode 100644 index 00000000..d4dae351 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/commontypes.py @@ -0,0 +1,82 @@ +import sys +from . 
import model +from .error import FFIError + + +COMMON_TYPES = {} + +try: + # fetch "bool" and all simple Windows types + from _cffi_backend import _get_common_types + _get_common_types(COMMON_TYPES) +except ImportError: + pass + +COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE') +COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above +COMMON_TYPES['float _Complex'] = '_cffi_float_complex_t' +COMMON_TYPES['double _Complex'] = '_cffi_double_complex_t' + +for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + if _type.endswith('_t'): + COMMON_TYPES[_type] = _type +del _type + +_CACHE = {} + +def resolve_common_type(parser, commontype): + try: + return _CACHE[commontype] + except KeyError: + cdecl = COMMON_TYPES.get(commontype, commontype) + if not isinstance(cdecl, str): + result, quals = cdecl, 0 # cdecl is already a BaseType + elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + result, quals = model.PrimitiveType(cdecl), 0 + elif cdecl == 'set-unicode-needed': + raise FFIError("The Windows type %r is only available after " + "you call ffi.set_unicode()" % (commontype,)) + else: + if commontype == cdecl: + raise FFIError( + "Unsupported type: %r. Please look at " + "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations " + "and file an issue if you think this type should really " + "be supported." % (commontype,)) + result, quals = parser.parse_type_and_quals(cdecl) # recursive + + assert isinstance(result, model.BaseTypeByIdentity) + _CACHE[commontype] = result, quals + return result, quals + + +# ____________________________________________________________ +# extra types for Windows (most of them are in commontypes.c) + + +def win_common_types(): + return { + "UNICODE_STRING": model.StructType( + "_UNICODE_STRING", + ["Length", + "MaximumLength", + "Buffer"], + [model.PrimitiveType("unsigned short"), + model.PrimitiveType("unsigned short"), + model.PointerType(model.PrimitiveType("wchar_t"))], + [-1, -1, -1]), + "PUNICODE_STRING": "UNICODE_STRING *", + "PCUNICODE_STRING": "const UNICODE_STRING *", + + "TBYTE": "set-unicode-needed", + "TCHAR": "set-unicode-needed", + "LPCTSTR": "set-unicode-needed", + "PCTSTR": "set-unicode-needed", + "LPTSTR": "set-unicode-needed", + "PTSTR": "set-unicode-needed", + "PTBYTE": "set-unicode-needed", + "PTCHAR": "set-unicode-needed", + } + +if sys.platform == 'win32': + COMMON_TYPES.update(win_common_types()) diff --git a/venv/lib/python3.12/site-packages/cffi/cparser.py b/venv/lib/python3.12/site-packages/cffi/cparser.py new file mode 100644 index 00000000..dd590d87 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/cparser.py @@ -0,0 +1,1015 @@ +from . import model +from .commontypes import COMMON_TYPES, resolve_common_type +from .error import FFIError, CDefError +try: + from . import _pycparser as pycparser +except ImportError: + import pycparser +import weakref, re, sys + +try: + if sys.version_info < (3,): + import thread as _thread + else: + import _thread + lock = _thread.allocate_lock() +except ImportError: + lock = None + +def _workaround_for_static_import_finders(): + # Issue #392: packaging tools like cx_Freeze can not find these + # because pycparser uses exec dynamic import. This is an obscure + # workaround. This function is never called. 
+    import pycparser.yacctab
+    import pycparser.lextab
+
+CDEF_SOURCE_STRING = "<cdef source string>"
+_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$",
+                        re.DOTALL | re.MULTILINE)
+_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)"
+                       r"\b((?:[^\n\\]|\\.)*?)$",
+                       re.DOTALL | re.MULTILINE)
+_r_line_directive = re.compile(r"^[ \t]*#[ \t]*(?:line|\d+)\b.*$", re.MULTILINE)
+_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}")
+_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$")
+_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]")
+_r_words = re.compile(r"\w+|\S")
+_parser_cache = None
+_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE)
+_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b")
+_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b")
+_r_cdecl = re.compile(r"\b__cdecl\b")
+_r_extern_python = re.compile(r'\bextern\s*"'
+                              r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.')
+_r_star_const_space = re.compile(       # matches "* const "
+    r"[*]\s*((const|volatile|restrict)\b\s*)+")
+_r_int_dotdotdot = re.compile(r"(\b(int|long|short|signed|unsigned|char)\s*)+"
+                              r"\.\.\.")
+_r_float_dotdotdot = re.compile(r"\b(double|float)\s*\.\.\.")
+
+def _get_parser():
+    global _parser_cache
+    if _parser_cache is None:
+        _parser_cache = pycparser.CParser()
+    return _parser_cache
+
+def _workaround_for_old_pycparser(csource):
+    # Workaround for a pycparser issue (fixed between pycparser 2.10 and
+    # 2.14): "char*const***" gives us a wrong syntax tree, the same as
+    # for "char***(*const)".  This means we can't tell the difference
+    # afterwards.  But "char(*const(***))" gives us the right syntax
+    # tree.  The issue only occurs if there are several stars in
+    # sequence with no parenthesis in between, just possibly qualifiers.
+    # Attempt to fix it by adding some parentheses in the source: each
+    # time we see "* const" or "* const *", we add an opening
+    # parenthesis before each star---the hard part is figuring out where
+    # to close them.
+    parts = []
+    while True:
+        match = _r_star_const_space.search(csource)
+        if not match:
+            break
+        #print repr(''.join(parts)+csource), '=>',
+        parts.append(csource[:match.start()])
+        parts.append('('); closing = ')'
+        parts.append(match.group())   # e.g.
"* const " + endpos = match.end() + if csource.startswith('*', endpos): + parts.append('('); closing += ')' + level = 0 + i = endpos + while i < len(csource): + c = csource[i] + if c == '(': + level += 1 + elif c == ')': + if level == 0: + break + level -= 1 + elif c in ',;=': + if level == 0: + break + i += 1 + csource = csource[endpos:i] + closing + csource[i:] + #print repr(''.join(parts)+csource) + parts.append(csource) + return ''.join(parts) + +def _preprocess_extern_python(csource): + # input: `extern "Python" int foo(int);` or + # `extern "Python" { int foo(int); }` + # output: + # void __cffi_extern_python_start; + # int foo(int); + # void __cffi_extern_python_stop; + # + # input: `extern "Python+C" int foo(int);` + # output: + # void __cffi_extern_python_plus_c_start; + # int foo(int); + # void __cffi_extern_python_stop; + parts = [] + while True: + match = _r_extern_python.search(csource) + if not match: + break + endpos = match.end() - 1 + #print + #print ''.join(parts)+csource + #print '=>' + parts.append(csource[:match.start()]) + if 'C' in match.group(1): + parts.append('void __cffi_extern_python_plus_c_start; ') + else: + parts.append('void __cffi_extern_python_start; ') + if csource[endpos] == '{': + # grouping variant + closing = csource.find('}', endpos) + if closing < 0: + raise CDefError("'extern \"Python\" {': no '}' found") + if csource.find('{', endpos + 1, closing) >= 0: + raise NotImplementedError("cannot use { } inside a block " + "'extern \"Python\" { ... }'") + parts.append(csource[endpos+1:closing]) + csource = csource[closing+1:] + else: + # non-grouping variant + semicolon = csource.find(';', endpos) + if semicolon < 0: + raise CDefError("'extern \"Python\": no ';' found") + parts.append(csource[endpos:semicolon+1]) + csource = csource[semicolon+1:] + parts.append(' void __cffi_extern_python_stop;') + #print ''.join(parts)+csource + #print + parts.append(csource) + return ''.join(parts) + +def _warn_for_string_literal(csource): + if '"' not in csource: + return + for line in csource.splitlines(): + if '"' in line and not line.lstrip().startswith('#'): + import warnings + warnings.warn("String literal found in cdef() or type source. " + "String literals are ignored here, but you should " + "remove them anyway because some character sequences " + "confuse pre-parsing.") + break + +def _warn_for_non_extern_non_static_global_variable(decl): + if not decl.storage: + import warnings + warnings.warn("Global variable '%s' in cdef(): for consistency " + "with C it should have a storage class specifier " + "(usually 'extern')" % (decl.name,)) + +def _remove_line_directives(csource): + # _r_line_directive matches whole lines, without the final \n, if they + # start with '#line' with some spacing allowed, or '#NUMBER'. This + # function stores them away and replaces them with exactly the string + # '#line@N', where N is the index in the list 'line_directives'. 
+    line_directives = []
+    def replace(m):
+        i = len(line_directives)
+        line_directives.append(m.group())
+        return '#line@%d' % i
+    csource = _r_line_directive.sub(replace, csource)
+    return csource, line_directives
+
+def _put_back_line_directives(csource, line_directives):
+    def replace(m):
+        s = m.group()
+        if not s.startswith('#line@'):
+            raise AssertionError("unexpected #line directive "
+                                 "(should have been processed and removed)")
+        return line_directives[int(s[6:])]
+    return _r_line_directive.sub(replace, csource)
+
+def _preprocess(csource):
+    # First, remove the lines of the form '#line N "filename"' because
+    # the "filename" part could confuse the rest
+    csource, line_directives = _remove_line_directives(csource)
+    # Remove comments.  NOTE: this only works because the cdef() section
+    # should not contain any string literals (except in line directives)!
+    def replace_keeping_newlines(m):
+        return ' ' + m.group().count('\n') * '\n'
+    csource = _r_comment.sub(replace_keeping_newlines, csource)
+    # Remove the "#define FOO x" lines
+    macros = {}
+    for match in _r_define.finditer(csource):
+        macroname, macrovalue = match.groups()
+        macrovalue = macrovalue.replace('\\\n', '').strip()
+        macros[macroname] = macrovalue
+    csource = _r_define.sub('', csource)
+    #
+    if pycparser.__version__ < '2.14':
+        csource = _workaround_for_old_pycparser(csource)
+    #
+    # BIG HACK: replace WINAPI or __stdcall with "volatile const".
+    # It doesn't make sense for the return type of a function to be
+    # "volatile volatile const", so we abuse it to detect __stdcall...
+    # Hack number 2 is that "int(volatile *fptr)();" is not valid C
+    # syntax, so we place the "volatile" before the opening parenthesis.
+    csource = _r_stdcall2.sub(' volatile volatile const(', csource)
+    csource = _r_stdcall1.sub(' volatile volatile const ', csource)
+    csource = _r_cdecl.sub(' ', csource)
+    #
+    # Replace `extern "Python"` with start/end markers
+    csource = _preprocess_extern_python(csource)
+    #
+    # Now there should not be any string literal left; warn if we get one
+    _warn_for_string_literal(csource)
+    #
+    # Replace "[...]" with "[__dotdotdotarray__]"
+    csource = _r_partial_array.sub('[__dotdotdotarray__]', csource)
+    #
+    # Replace "...}" with "__dotdotdotNUM__}".  This construction should
+    # occur only at the end of enums; at the end of structs we have "...;}"
+    # and at the end of vararg functions "...);".  Also replace "=...[,}]"
+    # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when
+    # giving an unknown value.
+    matches = list(_r_partial_enum.finditer(csource))
+    for number, match in enumerate(reversed(matches)):
+        p = match.start()
+        if csource[p] == '=':
+            p2 = csource.find('...', p, match.end())
+            assert p2 > p
+            csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number,
+                                                 csource[p2+3:])
+        else:
+            assert csource[p:p+3] == '...'
+            csource = '%s __dotdotdot%d__ %s' % (csource[:p], number,
+                                                 csource[p+3:])
+    # Replace "int ..." or "unsigned long int..." with "__dotdotdotint__"
+    csource = _r_int_dotdotdot.sub(' __dotdotdotint__ ', csource)
+    # Replace "float ..." or "double..." with "__dotdotdotfloat__"
+    csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource)
+    # Replace all remaining "..." with the same name, "__dotdotdot__",
+    # which is declared with a typedef for the purpose of C parsing.
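+    # Illustrative example (not from upstream cffi): a vararg declaration
+    # such as "int printf(char *, ...);" still contains "..." at this
+    # point and becomes "int printf(char *,  __dotdotdot__ );", which
+    # pycparser can then parse as an ordinary typedef'ed identifier.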
+    csource = csource.replace('...', ' __dotdotdot__ ')
+    # Finally, put back the line directives
+    csource = _put_back_line_directives(csource, line_directives)
+    return csource, macros
+
+def _common_type_names(csource):
+    # Look in the source for what looks like usages of types from the
+    # list of common types.  A "usage" is approximated here as the
+    # appearance of the word, minus a "definition" of the type, which
+    # is the last word in a "typedef" statement.  Approximative only
+    # but should be fine for all the common types.
+    look_for_words = set(COMMON_TYPES)
+    look_for_words.add(';')
+    look_for_words.add(',')
+    look_for_words.add('(')
+    look_for_words.add(')')
+    look_for_words.add('typedef')
+    words_used = set()
+    is_typedef = False
+    paren = 0
+    previous_word = ''
+    for word in _r_words.findall(csource):
+        if word in look_for_words:
+            if word == ';':
+                if is_typedef:
+                    words_used.discard(previous_word)
+                    look_for_words.discard(previous_word)
+                    is_typedef = False
+            elif word == 'typedef':
+                is_typedef = True
+                paren = 0
+            elif word == '(':
+                paren += 1
+            elif word == ')':
+                paren -= 1
+            elif word == ',':
+                if is_typedef and paren == 0:
+                    words_used.discard(previous_word)
+                    look_for_words.discard(previous_word)
+            else:   # word in COMMON_TYPES
+                words_used.add(word)
+        previous_word = word
+    return words_used
+
+
+class Parser(object):
+
+    def __init__(self):
+        self._declarations = {}
+        self._included_declarations = set()
+        self._anonymous_counter = 0
+        self._structnode2type = weakref.WeakKeyDictionary()
+        self._options = {}
+        self._int_constants = {}
+        self._recomplete = []
+        self._uses_new_feature = None
+
+    def _parse(self, csource):
+        csource, macros = _preprocess(csource)
+        # XXX: for more efficiency we would need to poke into the
+        # internals of CParser...  the following registers the
+        # typedefs, because their presence or absence influences the
+        # parsing itself (but what they are typedef'ed to plays no role)
+        ctn = _common_type_names(csource)
+        typenames = []
+        for name in sorted(self._declarations):
+            if name.startswith('typedef '):
+                name = name[8:]
+                typenames.append(name)
+                ctn.discard(name)
+        typenames += sorted(ctn)
+        #
+        csourcelines = []
+        csourcelines.append('# 1 "<cdef automatic initialization code>"')
+        for typename in typenames:
+            csourcelines.append('typedef int %s;' % typename)
+        csourcelines.append('typedef int __dotdotdotint__, __dotdotdotfloat__,'
+                            ' __dotdotdot__;')
+        # this forces pycparser to consider the following in the file
+        # called <cdef source string> from line 1
+        csourcelines.append('# 1 "%s"' % (CDEF_SOURCE_STRING,))
+        csourcelines.append(csource)
+        csourcelines.append('')   # see test_missing_newline_bug
+        fullcsource = '\n'.join(csourcelines)
+        if lock is not None:
+            lock.acquire()     # pycparser is not thread-safe...
+        try:
+            ast = _get_parser().parse(fullcsource)
+        except pycparser.c_parser.ParseError as e:
+            self.convert_pycparser_error(e, csource)
+        finally:
+            if lock is not None:
+                lock.release()
+        # csource will be used to find buggy source text
+        return ast, macros, csource
+
+    def _convert_pycparser_error(self, e, csource):
+        # xxx look for "<cdef source string>:NUM:" at the start of str(e)
+        # and interpret that as a line number.  This will not work if
+        # the user gives explicit ``# NUM "FILE"`` directives.
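+        # For illustration (an assumed pycparser message format, not a
+        # verbatim example from upstream): str(e) of the form
+        #     <cdef source string>:2: before: }
+        # yields linenum == 2 and returns the second line of csource.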
+ line = None + msg = str(e) + match = re.match(r"%s:(\d+):" % (CDEF_SOURCE_STRING,), msg) + if match: + linenum = int(match.group(1), 10) + csourcelines = csource.splitlines() + if 1 <= linenum <= len(csourcelines): + line = csourcelines[linenum-1] + return line + + def convert_pycparser_error(self, e, csource): + line = self._convert_pycparser_error(e, csource) + + msg = str(e) + if line: + msg = 'cannot parse "%s"\n%s' % (line.strip(), msg) + else: + msg = 'parse error\n%s' % (msg,) + raise CDefError(msg) + + def parse(self, csource, override=False, packed=False, pack=None, + dllexport=False): + if packed: + if packed != True: + raise ValueError("'packed' should be False or True; use " + "'pack' to give another value") + if pack: + raise ValueError("cannot give both 'pack' and 'packed'") + pack = 1 + elif pack: + if pack & (pack - 1): + raise ValueError("'pack' must be a power of two, not %r" % + (pack,)) + else: + pack = 0 + prev_options = self._options + try: + self._options = {'override': override, + 'packed': pack, + 'dllexport': dllexport} + self._internal_parse(csource) + finally: + self._options = prev_options + + def _internal_parse(self, csource): + ast, macros, csource = self._parse(csource) + # add the macros + self._process_macros(macros) + # find the first "__dotdotdot__" and use that as a separator + # between the repeated typedefs and the real csource + iterator = iter(ast.ext) + for decl in iterator: + if decl.name == '__dotdotdot__': + break + else: + assert 0 + current_decl = None + # + try: + self._inside_extern_python = '__cffi_extern_python_stop' + for decl in iterator: + current_decl = decl + if isinstance(decl, pycparser.c_ast.Decl): + self._parse_decl(decl) + elif isinstance(decl, pycparser.c_ast.Typedef): + if not decl.name: + raise CDefError("typedef does not declare any name", + decl) + quals = 0 + if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and + decl.type.type.names[-1].startswith('__dotdotdot')): + realtype = self._get_unknown_type(decl) + elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and + isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and + isinstance(decl.type.type.type, + pycparser.c_ast.IdentifierType) and + decl.type.type.type.names[-1].startswith('__dotdotdot')): + realtype = self._get_unknown_ptr_type(decl) + else: + realtype, quals = self._get_type_and_quals( + decl.type, name=decl.name, partial_length_ok=True, + typedef_example="*(%s *)0" % (decl.name,)) + self._declare('typedef ' + decl.name, realtype, quals=quals) + elif decl.__class__.__name__ == 'Pragma': + # skip pragma, only in pycparser 2.15 + import warnings + warnings.warn( + "#pragma in cdef() are entirely ignored. " + "They should be removed for now, otherwise your " + "code might behave differently in a future version " + "of CFFI if #pragma support gets added. 
Note that " + "'#pragma pack' needs to be replaced with the " + "'packed' keyword argument to cdef().") + else: + raise CDefError("unexpected <%s>: this construct is valid " + "C but not valid in cdef()" % + decl.__class__.__name__, decl) + except CDefError as e: + if len(e.args) == 1: + e.args = e.args + (current_decl,) + raise + except FFIError as e: + msg = self._convert_pycparser_error(e, csource) + if msg: + e.args = (e.args[0] + "\n *** Err: %s" % msg,) + raise + + def _add_constants(self, key, val): + if key in self._int_constants: + if self._int_constants[key] == val: + return # ignore identical double declarations + raise FFIError( + "multiple declarations of constant: %s" % (key,)) + self._int_constants[key] = val + + def _add_integer_constant(self, name, int_str): + int_str = int_str.lower().rstrip("ul") + neg = int_str.startswith('-') + if neg: + int_str = int_str[1:] + # "010" is not valid oct in py3 + if (int_str.startswith("0") and int_str != '0' + and not int_str.startswith("0x")): + int_str = "0o" + int_str[1:] + pyvalue = int(int_str, 0) + if neg: + pyvalue = -pyvalue + self._add_constants(name, pyvalue) + self._declare('macro ' + name, pyvalue) + + def _process_macros(self, macros): + for key, value in macros.items(): + value = value.strip() + if _r_int_literal.match(value): + self._add_integer_constant(key, value) + elif value == '...': + self._declare('macro ' + key, value) + else: + raise CDefError( + 'only supports one of the following syntax:\n' + ' #define %s ... (literally dot-dot-dot)\n' + ' #define %s NUMBER (with NUMBER an integer' + ' constant, decimal/hex/octal)\n' + 'got:\n' + ' #define %s %s' + % (key, key, key, value)) + + def _declare_function(self, tp, quals, decl): + tp = self._get_type_pointer(tp, quals) + if self._options.get('dllexport'): + tag = 'dllexport_python ' + elif self._inside_extern_python == '__cffi_extern_python_start': + tag = 'extern_python ' + elif self._inside_extern_python == '__cffi_extern_python_plus_c_start': + tag = 'extern_python_plus_c ' + else: + tag = 'function ' + self._declare(tag + decl.name, tp) + + def _parse_decl(self, decl): + node = decl.type + if isinstance(node, pycparser.c_ast.FuncDecl): + tp, quals = self._get_type_and_quals(node, name=decl.name) + assert isinstance(tp, model.RawFunctionType) + self._declare_function(tp, quals, decl) + else: + if isinstance(node, pycparser.c_ast.Struct): + self._get_struct_union_enum_type('struct', node) + elif isinstance(node, pycparser.c_ast.Union): + self._get_struct_union_enum_type('union', node) + elif isinstance(node, pycparser.c_ast.Enum): + self._get_struct_union_enum_type('enum', node) + elif not decl.name: + raise CDefError("construct does not declare any variable", + decl) + # + if decl.name: + tp, quals = self._get_type_and_quals(node, + partial_length_ok=True) + if tp.is_raw_function: + self._declare_function(tp, quals, decl) + elif (tp.is_integer_type() and + hasattr(decl, 'init') and + hasattr(decl.init, 'value') and + _r_int_literal.match(decl.init.value)): + self._add_integer_constant(decl.name, decl.init.value) + elif (tp.is_integer_type() and + isinstance(decl.init, pycparser.c_ast.UnaryOp) and + decl.init.op == '-' and + hasattr(decl.init.expr, 'value') and + _r_int_literal.match(decl.init.expr.value)): + self._add_integer_constant(decl.name, + '-' + decl.init.expr.value) + elif (tp is model.void_type and + decl.name.startswith('__cffi_extern_python_')): + # hack: `extern "Python"` in the C source is replaced + # with "void __cffi_extern_python_start;" and + # 
"void __cffi_extern_python_stop;" + self._inside_extern_python = decl.name + else: + if self._inside_extern_python !='__cffi_extern_python_stop': + raise CDefError( + "cannot declare constants or " + "variables with 'extern \"Python\"'") + if (quals & model.Q_CONST) and not tp.is_array_type: + self._declare('constant ' + decl.name, tp, quals=quals) + else: + _warn_for_non_extern_non_static_global_variable(decl) + self._declare('variable ' + decl.name, tp, quals=quals) + + def parse_type(self, cdecl): + return self.parse_type_and_quals(cdecl)[0] + + def parse_type_and_quals(self, cdecl): + ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2] + assert not macros + exprnode = ast.ext[-1].type.args.params[0] + if isinstance(exprnode, pycparser.c_ast.ID): + raise CDefError("unknown identifier '%s'" % (exprnode.name,)) + return self._get_type_and_quals(exprnode.type) + + def _declare(self, name, obj, included=False, quals=0): + if name in self._declarations: + prevobj, prevquals = self._declarations[name] + if prevobj is obj and prevquals == quals: + return + if not self._options.get('override'): + raise FFIError( + "multiple declarations of %s (for interactive usage, " + "try cdef(xx, override=True))" % (name,)) + assert '__dotdotdot__' not in name.split() + self._declarations[name] = (obj, quals) + if included: + self._included_declarations.add(obj) + + def _extract_quals(self, type): + quals = 0 + if isinstance(type, (pycparser.c_ast.TypeDecl, + pycparser.c_ast.PtrDecl)): + if 'const' in type.quals: + quals |= model.Q_CONST + if 'volatile' in type.quals: + quals |= model.Q_VOLATILE + if 'restrict' in type.quals: + quals |= model.Q_RESTRICT + return quals + + def _get_type_pointer(self, type, quals, declname=None): + if isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + if (isinstance(type, model.StructOrUnionOrEnum) and + type.name.startswith('$') and type.name[1:].isdigit() and + type.forcename is None and declname is not None): + return model.NamedPointerType(type, declname, quals) + return model.PointerType(type, quals) + + def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False, + typedef_example=None): + # first, dereference typedefs, if we have it already parsed, we're good + if (isinstance(typenode, pycparser.c_ast.TypeDecl) and + isinstance(typenode.type, pycparser.c_ast.IdentifierType) and + len(typenode.type.names) == 1 and + ('typedef ' + typenode.type.names[0]) in self._declarations): + tp, quals = self._declarations['typedef ' + typenode.type.names[0]] + quals |= self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.ArrayDecl): + # array type + if typenode.dim is None: + length = None + else: + length = self._parse_constant( + typenode.dim, partial_length_ok=partial_length_ok) + # a hack: in 'typedef int foo_t[...][...];', don't use '...' as + # the length but use directly the C expression that would be + # generated by recompiler.py. 
This lets the typedef be used in + # many more places within recompiler.py + if typedef_example is not None: + if length == '...': + length = '_cffi_array_len(%s)' % (typedef_example,) + typedef_example = "*" + typedef_example + # + tp, quals = self._get_type_and_quals(typenode.type, + partial_length_ok=partial_length_ok, + typedef_example=typedef_example) + return model.ArrayType(tp, length), quals + # + if isinstance(typenode, pycparser.c_ast.PtrDecl): + # pointer type + itemtype, itemquals = self._get_type_and_quals(typenode.type) + tp = self._get_type_pointer(itemtype, itemquals, declname=name) + quals = self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.TypeDecl): + quals = self._extract_quals(typenode) + type = typenode.type + if isinstance(type, pycparser.c_ast.IdentifierType): + # assume a primitive type. get it from .names, but reduce + # synonyms to a single chosen combination + names = list(type.names) + if names != ['signed', 'char']: # keep this unmodified + prefixes = {} + while names: + name = names[0] + if name in ('short', 'long', 'signed', 'unsigned'): + prefixes[name] = prefixes.get(name, 0) + 1 + del names[0] + else: + break + # ignore the 'signed' prefix below, and reorder the others + newnames = [] + for prefix in ('unsigned', 'short', 'long'): + for i in range(prefixes.get(prefix, 0)): + newnames.append(prefix) + if not names: + names = ['int'] # implicitly + if names == ['int']: # but kill it if 'short' or 'long' + if 'short' in prefixes or 'long' in prefixes: + names = [] + names = newnames + names + ident = ' '.join(names) + if ident == 'void': + return model.void_type, quals + if ident == '__dotdotdot__': + raise FFIError(':%d: bad usage of "..."' % + typenode.coord.line) + tp0, quals0 = resolve_common_type(self, ident) + return tp0, (quals | quals0) + # + if isinstance(type, pycparser.c_ast.Struct): + # 'struct foobar' + tp = self._get_struct_union_enum_type('struct', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Union): + # 'union foobar' + tp = self._get_struct_union_enum_type('union', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Enum): + # 'enum foobar' + tp = self._get_struct_union_enum_type('enum', type, name) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.FuncDecl): + # a function type + return self._parse_function_type(typenode, name), 0 + # + # nested anonymous structs or unions end up here + if isinstance(typenode, pycparser.c_ast.Struct): + return self._get_struct_union_enum_type('struct', typenode, name, + nested=True), 0 + if isinstance(typenode, pycparser.c_ast.Union): + return self._get_struct_union_enum_type('union', typenode, name, + nested=True), 0 + # + raise FFIError(":%d: bad or unsupported type declaration" % + typenode.coord.line) + + def _parse_function_type(self, typenode, funcname=None): + params = list(getattr(typenode.args, 'params', [])) + for i, arg in enumerate(params): + if not hasattr(arg, 'type'): + raise CDefError("%s arg %d: unknown type '%s'" + " (if you meant to use the old C syntax of giving" + " untyped arguments, it is not supported)" + % (funcname or 'in expression', i + 1, + getattr(arg, 'name', '?'))) + ellipsis = ( + len(params) > 0 and + isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and + isinstance(params[-1].type.type, + pycparser.c_ast.IdentifierType) and + params[-1].type.type.names == ['__dotdotdot__']) + if ellipsis: + params.pop() + if not params: + raise CDefError( + "%s: a function with 
only '(...)' as argument" + " is not correct C" % (funcname or 'in expression')) + args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type)) + for argdeclnode in params] + if not ellipsis and args == [model.void_type]: + args = [] + result, quals = self._get_type_and_quals(typenode.type) + # the 'quals' on the result type are ignored. HACK: we absure them + # to detect __stdcall functions: we textually replace "__stdcall" + # with "volatile volatile const" above. + abi = None + if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway + if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']: + abi = '__stdcall' + return model.RawFunctionType(tuple(args), result, ellipsis, abi) + + def _as_func_arg(self, type, quals): + if isinstance(type, model.ArrayType): + return model.PointerType(type.item, quals) + elif isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + else: + return type + + def _get_struct_union_enum_type(self, kind, type, name=None, nested=False): + # First, a level of caching on the exact 'type' node of the AST. + # This is obscure, but needed because pycparser "unrolls" declarations + # such as "typedef struct { } foo_t, *foo_p" and we end up with + # an AST that is not a tree, but a DAG, with the "type" node of the + # two branches foo_t and foo_p of the trees being the same node. + # It's a bit silly but detecting "DAG-ness" in the AST tree seems + # to be the only way to distinguish this case from two independent + # structs. See test_struct_with_two_usages. + try: + return self._structnode2type[type] + except KeyError: + pass + # + # Note that this must handle parsing "struct foo" any number of + # times and always return the same StructType object. Additionally, + # one of these times (not necessarily the first), the fields of + # the struct can be specified with "struct foo { ...fields... }". + # If no name is given, then we have to create a new anonymous struct + # with no caching; in this case, the fields are either specified + # right now or never. + # + force_name = name + name = type.name + # + # get the type or create it if needed + if name is None: + # 'force_name' is used to guess a more readable name for + # anonymous structs, for the common case "typedef struct { } foo". + if force_name is not None: + explicit_name = '$%s' % force_name + else: + self._anonymous_counter += 1 + explicit_name = '$%d' % self._anonymous_counter + tp = None + else: + explicit_name = name + key = '%s %s' % (kind, name) + tp, _ = self._declarations.get(key, (None, None)) + # + if tp is None: + if kind == 'struct': + tp = model.StructType(explicit_name, None, None, None) + elif kind == 'union': + tp = model.UnionType(explicit_name, None, None, None) + elif kind == 'enum': + if explicit_name == '__dotdotdot__': + raise CDefError("Enums cannot be declared with ...") + tp = self._build_enum_type(explicit_name, type.values) + else: + raise AssertionError("kind = %r" % (kind,)) + if name is not None: + self._declare(key, tp) + else: + if kind == 'enum' and type.values is not None: + raise NotImplementedError( + "enum %s: the '{}' declaration should appear on the first " + "time the enum is mentioned, not later" % explicit_name) + if not tp.forcename: + tp.force_the_name(force_name) + if tp.forcename and '$' in tp.name: + self._declare('anonymous %s' % tp.forcename, tp) + # + self._structnode2type[type] = tp + # + # enums: done here + if kind == 'enum': + return tp + # + # is there a 'type.decls'? 
If yes, then this is the place in the + # C sources that declare the fields. If no, then just return the + # existing type, possibly still incomplete. + if type.decls is None: + return tp + # + if tp.fldnames is not None: + raise CDefError("duplicate declaration of struct %s" % name) + fldnames = [] + fldtypes = [] + fldbitsize = [] + fldquals = [] + for decl in type.decls: + if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and + ''.join(decl.type.names) == '__dotdotdot__'): + # XXX pycparser is inconsistent: 'names' should be a list + # of strings, but is sometimes just one string. Use + # str.join() as a way to cope with both. + self._make_partial(tp, nested) + continue + if decl.bitsize is None: + bitsize = -1 + else: + bitsize = self._parse_constant(decl.bitsize) + self._partial_length = False + type, fqual = self._get_type_and_quals(decl.type, + partial_length_ok=True) + if self._partial_length: + self._make_partial(tp, nested) + if isinstance(type, model.StructType) and type.partial: + self._make_partial(tp, nested) + fldnames.append(decl.name or '') + fldtypes.append(type) + fldbitsize.append(bitsize) + fldquals.append(fqual) + tp.fldnames = tuple(fldnames) + tp.fldtypes = tuple(fldtypes) + tp.fldbitsize = tuple(fldbitsize) + tp.fldquals = tuple(fldquals) + if fldbitsize != [-1] * len(fldbitsize): + if isinstance(tp, model.StructType) and tp.partial: + raise NotImplementedError("%s: using both bitfields and '...;'" + % (tp,)) + tp.packed = self._options.get('packed') + if tp.completed: # must be re-completed: it is not opaque any more + tp.completed = 0 + self._recomplete.append(tp) + return tp + + def _make_partial(self, tp, nested): + if not isinstance(tp, model.StructOrUnion): + raise CDefError("%s cannot be partial" % (tp,)) + if not tp.has_c_name() and not nested: + raise NotImplementedError("%s is partial but has no C name" %(tp,)) + tp.partial = True + + def _parse_constant(self, exprnode, partial_length_ok=False): + # for now, limited to expressions that are an immediate number + # or positive/negative number + if isinstance(exprnode, pycparser.c_ast.Constant): + s = exprnode.value + if '0' <= s[0] <= '9': + s = s.rstrip('uUlL') + try: + if s.startswith('0'): + return int(s, 8) + else: + return int(s, 10) + except ValueError: + if len(s) > 1: + if s.lower()[0:2] == '0x': + return int(s, 16) + elif s.lower()[0:2] == '0b': + return int(s, 2) + raise CDefError("invalid constant %r" % (s,)) + elif s[0] == "'" and s[-1] == "'" and ( + len(s) == 3 or (len(s) == 4 and s[1] == "\\")): + return ord(s[-2]) + else: + raise CDefError("invalid constant %r" % (s,)) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '+'): + return self._parse_constant(exprnode.expr) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '-'): + return -self._parse_constant(exprnode.expr) + # load previously defined int constant + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name in self._int_constants): + return self._int_constants[exprnode.name] + # + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name == '__dotdotdotarray__'): + if partial_length_ok: + self._partial_length = True + return '...' 
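+            # (Illustrative note, not from upstream cffi: this branch is
+            # reached when '[...]' appears where a partial length is not
+            # accepted, e.g. in a type string passed to ffi.typeof().)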
+ raise FFIError(":%d: unsupported '[...]' here, cannot derive " + "the actual array length in this context" + % exprnode.coord.line) + # + if isinstance(exprnode, pycparser.c_ast.BinaryOp): + left = self._parse_constant(exprnode.left) + right = self._parse_constant(exprnode.right) + if exprnode.op == '+': + return left + right + elif exprnode.op == '-': + return left - right + elif exprnode.op == '*': + return left * right + elif exprnode.op == '/': + return self._c_div(left, right) + elif exprnode.op == '%': + return left - self._c_div(left, right) * right + elif exprnode.op == '<<': + return left << right + elif exprnode.op == '>>': + return left >> right + elif exprnode.op == '&': + return left & right + elif exprnode.op == '|': + return left | right + elif exprnode.op == '^': + return left ^ right + # + raise FFIError(":%d: unsupported expression: expected a " + "simple numeric constant" % exprnode.coord.line) + + def _c_div(self, a, b): + result = a // b + if ((a < 0) ^ (b < 0)) and (a % b) != 0: + result += 1 + return result + + def _build_enum_type(self, explicit_name, decls): + if decls is not None: + partial = False + enumerators = [] + enumvalues = [] + nextenumvalue = 0 + for enum in decls.enumerators: + if _r_enum_dotdotdot.match(enum.name): + partial = True + continue + if enum.value is not None: + nextenumvalue = self._parse_constant(enum.value) + enumerators.append(enum.name) + enumvalues.append(nextenumvalue) + self._add_constants(enum.name, nextenumvalue) + nextenumvalue += 1 + enumerators = tuple(enumerators) + enumvalues = tuple(enumvalues) + tp = model.EnumType(explicit_name, enumerators, enumvalues) + tp.partial = partial + else: # opaque enum + tp = model.EnumType(explicit_name, (), ()) + return tp + + def include(self, other): + for name, (tp, quals) in other._declarations.items(): + if name.startswith('anonymous $enum_$'): + continue # fix for test_anonymous_enum_include + kind = name.split(' ', 1)[0] + if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'): + self._declare(name, tp, included=True, quals=quals) + for k, v in other._int_constants.items(): + self._add_constants(k, v) + + def _get_unknown_type(self, decl): + typenames = decl.type.type.names + if typenames == ['__dotdotdot__']: + return model.unknown_type(decl.name) + + if typenames == ['__dotdotdotint__']: + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef int... %s'" % decl.name + return model.UnknownIntegerType(decl.name) + + if typenames == ['__dotdotdotfloat__']: + # note: not for 'long double' so far + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef float... %s'" % decl.name + return model.UnknownFloatType(decl.name) + + raise FFIError(':%d: unsupported usage of "..." in typedef' + % decl.coord.line) + + def _get_unknown_ptr_type(self, decl): + if decl.type.type.type.names == ['__dotdotdot__']: + return model.unknown_ptr_type(decl.name) + raise FFIError(':%d: unsupported usage of "..." 
in typedef' + % decl.coord.line) diff --git a/venv/lib/python3.12/site-packages/cffi/error.py b/venv/lib/python3.12/site-packages/cffi/error.py new file mode 100644 index 00000000..0a27247c --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/error.py @@ -0,0 +1,31 @@ + +class FFIError(Exception): + __module__ = 'cffi' + +class CDefError(Exception): + __module__ = 'cffi' + def __str__(self): + try: + current_decl = self.args[1] + filename = current_decl.coord.file + linenum = current_decl.coord.line + prefix = '%s:%d: ' % (filename, linenum) + except (AttributeError, TypeError, IndexError): + prefix = '' + return '%s%s' % (prefix, self.args[0]) + +class VerificationError(Exception): + """ An error raised when verification fails + """ + __module__ = 'cffi' + +class VerificationMissing(Exception): + """ An error raised when incomplete structures are passed into + cdef, but no verification has been done + """ + __module__ = 'cffi' + +class PkgConfigError(Exception): + """ An error raised for missing modules in pkg-config + """ + __module__ = 'cffi' diff --git a/venv/lib/python3.12/site-packages/cffi/ffiplatform.py b/venv/lib/python3.12/site-packages/cffi/ffiplatform.py new file mode 100644 index 00000000..adca28f1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/ffiplatform.py @@ -0,0 +1,113 @@ +import sys, os +from .error import VerificationError + + +LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs', + 'extra_objects', 'depends'] + +def get_extension(srcfilename, modname, sources=(), **kwds): + from cffi._shimmed_dist_utils import Extension + allsources = [srcfilename] + for src in sources: + allsources.append(os.path.normpath(src)) + return Extension(name=modname, sources=allsources, **kwds) + +def compile(tmpdir, ext, compiler_verbose=0, debug=None): + """Compile a C extension module using distutils.""" + + saved_environ = os.environ.copy() + try: + outputfilename = _build(tmpdir, ext, compiler_verbose, debug) + outputfilename = os.path.abspath(outputfilename) + finally: + # workaround for a distutils bugs where some env vars can + # become longer and longer every time it is used + for key, value in saved_environ.items(): + if os.environ.get(key) != value: + os.environ[key] = value + return outputfilename + +def _build(tmpdir, ext, compiler_verbose=0, debug=None): + # XXX compact but horrible :-( + from cffi._shimmed_dist_utils import Distribution, CompileError, LinkError, set_threshold, set_verbosity + + dist = Distribution({'ext_modules': [ext]}) + dist.parse_config_files() + options = dist.get_option_dict('build_ext') + if debug is None: + debug = sys.flags.debug + options['debug'] = ('ffiplatform', debug) + options['force'] = ('ffiplatform', True) + options['build_lib'] = ('ffiplatform', tmpdir) + options['build_temp'] = ('ffiplatform', tmpdir) + # + try: + old_level = set_threshold(0) or 0 + try: + set_verbosity(compiler_verbose) + dist.run_command('build_ext') + cmd_obj = dist.get_command_obj('build_ext') + [soname] = cmd_obj.get_outputs() + finally: + set_threshold(old_level) + except (CompileError, LinkError) as e: + raise VerificationError('%s: %s' % (e.__class__.__name__, e)) + # + return soname + +try: + from os.path import samefile +except ImportError: + def samefile(f1, f2): + return os.path.abspath(f1) == os.path.abspath(f2) + +def maybe_relative_path(path): + if not os.path.isabs(path): + return path # already relative + dir = path + names = [] + while True: + prevdir = dir + dir, name = os.path.split(prevdir) + if dir == prevdir or not dir: + 
return path # failed to make it relative + names.append(name) + try: + if samefile(dir, os.curdir): + names.reverse() + return os.path.join(*names) + except OSError: + pass + +# ____________________________________________________________ + +try: + int_or_long = (int, long) + import cStringIO +except NameError: + int_or_long = int # Python 3 + import io as cStringIO + +def _flatten(x, f): + if isinstance(x, str): + f.write('%ds%s' % (len(x), x)) + elif isinstance(x, dict): + keys = sorted(x.keys()) + f.write('%dd' % len(keys)) + for key in keys: + _flatten(key, f) + _flatten(x[key], f) + elif isinstance(x, (list, tuple)): + f.write('%dl' % len(x)) + for value in x: + _flatten(value, f) + elif isinstance(x, int_or_long): + f.write('%di' % (x,)) + else: + raise TypeError( + "the keywords to verify() contains unsupported object %r" % (x,)) + +def flatten(x): + f = cStringIO.StringIO() + _flatten(x, f) + return f.getvalue() diff --git a/venv/lib/python3.12/site-packages/cffi/lock.py b/venv/lib/python3.12/site-packages/cffi/lock.py new file mode 100644 index 00000000..db91b715 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/lock.py @@ -0,0 +1,30 @@ +import sys + +if sys.version_info < (3,): + try: + from thread import allocate_lock + except ImportError: + from dummy_thread import allocate_lock +else: + try: + from _thread import allocate_lock + except ImportError: + from _dummy_thread import allocate_lock + + +##import sys +##l1 = allocate_lock + +##class allocate_lock(object): +## def __init__(self): +## self._real = l1() +## def __enter__(self): +## for i in range(4, 0, -1): +## print sys._getframe(i).f_code +## print +## return self._real.__enter__() +## def __exit__(self, *args): +## return self._real.__exit__(*args) +## def acquire(self, f): +## assert f is False +## return self._real.acquire(f) diff --git a/venv/lib/python3.12/site-packages/cffi/model.py b/venv/lib/python3.12/site-packages/cffi/model.py new file mode 100644 index 00000000..e5f4cae3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/model.py @@ -0,0 +1,618 @@ +import types +import weakref + +from .lock import allocate_lock +from .error import CDefError, VerificationError, VerificationMissing + +# type qualifiers +Q_CONST = 0x01 +Q_RESTRICT = 0x02 +Q_VOLATILE = 0x04 + +def qualify(quals, replace_with): + if quals & Q_CONST: + replace_with = ' const ' + replace_with.lstrip() + if quals & Q_VOLATILE: + replace_with = ' volatile ' + replace_with.lstrip() + if quals & Q_RESTRICT: + # It seems that __restrict is supported by gcc and msvc. + # If you hit some different compiler, add a #define in + # _cffi_include.h for it (and in its copies, documented there) + replace_with = ' __restrict ' + replace_with.lstrip() + return replace_with + + +class BaseTypeByIdentity(object): + is_array_type = False + is_raw_function = False + + def get_c_name(self, replace_with='', context='a C file', quals=0): + result = self.c_name_with_marker + assert result.count('&') == 1 + # some logic duplication with ffi.getctype()... 
:-( + replace_with = replace_with.strip() + if replace_with: + if replace_with.startswith('*') and '&[' in result: + replace_with = '(%s)' % replace_with + elif not replace_with[0] in '[(': + replace_with = ' ' + replace_with + replace_with = qualify(quals, replace_with) + result = result.replace('&', replace_with) + if '$' in result: + raise VerificationError( + "cannot generate '%s' in %s: unknown type name" + % (self._get_c_name(), context)) + return result + + def _get_c_name(self): + return self.c_name_with_marker.replace('&', '') + + def has_c_name(self): + return '$' not in self._get_c_name() + + def is_integer_type(self): + return False + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + try: + BType = ffi._cached_btypes[self] + except KeyError: + BType = self.build_backend_type(ffi, finishlist) + BType2 = ffi._cached_btypes.setdefault(self, BType) + assert BType2 is BType + return BType + + def __repr__(self): + return '<%s>' % (self._get_c_name(),) + + def _get_items(self): + return [(name, getattr(self, name)) for name in self._attrs_] + + +class BaseType(BaseTypeByIdentity): + + def __eq__(self, other): + return (self.__class__ == other.__class__ and + self._get_items() == other._get_items()) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.__class__, tuple(self._get_items()))) + + +class VoidType(BaseType): + _attrs_ = () + + def __init__(self): + self.c_name_with_marker = 'void&' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_void_type') + +void_type = VoidType() + + +class BasePrimitiveType(BaseType): + def is_complex_type(self): + return False + + +class PrimitiveType(BasePrimitiveType): + _attrs_ = ('name',) + + ALL_PRIMITIVE_TYPES = { + 'char': 'c', + 'short': 'i', + 'int': 'i', + 'long': 'i', + 'long long': 'i', + 'signed char': 'i', + 'unsigned char': 'i', + 'unsigned short': 'i', + 'unsigned int': 'i', + 'unsigned long': 'i', + 'unsigned long long': 'i', + 'float': 'f', + 'double': 'f', + 'long double': 'f', + '_cffi_float_complex_t': 'j', + '_cffi_double_complex_t': 'j', + '_Bool': 'i', + # the following types are not primitive in the C sense + 'wchar_t': 'c', + 'char16_t': 'c', + 'char32_t': 'c', + 'int8_t': 'i', + 'uint8_t': 'i', + 'int16_t': 'i', + 'uint16_t': 'i', + 'int32_t': 'i', + 'uint32_t': 'i', + 'int64_t': 'i', + 'uint64_t': 'i', + 'int_least8_t': 'i', + 'uint_least8_t': 'i', + 'int_least16_t': 'i', + 'uint_least16_t': 'i', + 'int_least32_t': 'i', + 'uint_least32_t': 'i', + 'int_least64_t': 'i', + 'uint_least64_t': 'i', + 'int_fast8_t': 'i', + 'uint_fast8_t': 'i', + 'int_fast16_t': 'i', + 'uint_fast16_t': 'i', + 'int_fast32_t': 'i', + 'uint_fast32_t': 'i', + 'int_fast64_t': 'i', + 'uint_fast64_t': 'i', + 'intptr_t': 'i', + 'uintptr_t': 'i', + 'intmax_t': 'i', + 'uintmax_t': 'i', + 'ptrdiff_t': 'i', + 'size_t': 'i', + 'ssize_t': 'i', + } + + def __init__(self, name): + assert name in self.ALL_PRIMITIVE_TYPES + self.name = name + self.c_name_with_marker = name + '&' + + def is_char_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'c' + def is_integer_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'i' + def is_float_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'f' + def is_complex_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'j' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_primitive_type', self.name) + + +class UnknownIntegerType(BasePrimitiveType): + 
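    # Editor's illustrative note, not part of upstream cffi: this class models
    # the "int..." syntax, e.g. ffi.cdef("typedef int... foo_t;"), where the
    # exact size and signedness of foo_t are only discovered when the C
    # compiler runs -- hence build_backend_type() below refuses to work
    # before compilation.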
_attrs_ = ('name',) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def is_integer_type(self): + return True + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("integer type '%s' can only be used after " + "compilation" % self.name) + +class UnknownFloatType(BasePrimitiveType): + _attrs_ = ('name', ) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("float type '%s' can only be used after " + "compilation" % self.name) + + +class BaseFunctionType(BaseType): + _attrs_ = ('args', 'result', 'ellipsis', 'abi') + + def __init__(self, args, result, ellipsis, abi=None): + self.args = args + self.result = result + self.ellipsis = ellipsis + self.abi = abi + # + reprargs = [arg._get_c_name() for arg in self.args] + if self.ellipsis: + reprargs.append('...') + reprargs = reprargs or ['void'] + replace_with = self._base_pattern % (', '.join(reprargs),) + if abi is not None: + replace_with = replace_with[:1] + abi + ' ' + replace_with[1:] + self.c_name_with_marker = ( + self.result.c_name_with_marker.replace('&', replace_with)) + + +class RawFunctionType(BaseFunctionType): + # Corresponds to a C type like 'int(int)', which is the C type of + # a function, but not a pointer-to-function. The backend has no + # notion of such a type; it's used temporarily by parsing. + _base_pattern = '(&)(%s)' + is_raw_function = True + + def build_backend_type(self, ffi, finishlist): + raise CDefError("cannot render the type %r: it is a function " + "type, not a pointer-to-function type" % (self,)) + + def as_function_pointer(self): + return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi) + + +class FunctionPtrType(BaseFunctionType): + _base_pattern = '(*&)(%s)' + + def build_backend_type(self, ffi, finishlist): + result = self.result.get_cached_btype(ffi, finishlist) + args = [] + for tp in self.args: + args.append(tp.get_cached_btype(ffi, finishlist)) + abi_args = () + if self.abi == "__stdcall": + if not self.ellipsis: # __stdcall ignored for variadic funcs + try: + abi_args = (ffi._backend.FFI_STDCALL,) + except AttributeError: + pass + return global_cache(self, ffi, 'new_function_type', + tuple(args), result, self.ellipsis, *abi_args) + + def as_raw_function(self): + return RawFunctionType(self.args, self.result, self.ellipsis, self.abi) + + +class PointerType(BaseType): + _attrs_ = ('totype', 'quals') + + def __init__(self, totype, quals=0): + self.totype = totype + self.quals = quals + extra = " *&" + if totype.is_array_type: + extra = "(%s)" % (extra.lstrip(),) + extra = qualify(quals, extra) + self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra) + + def build_backend_type(self, ffi, finishlist): + BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True) + return global_cache(self, ffi, 'new_pointer_type', BItem) + +voidp_type = PointerType(void_type) + +def ConstPointerType(totype): + return PointerType(totype, Q_CONST) + +const_voidp_type = ConstPointerType(void_type) + + +class NamedPointerType(PointerType): + _attrs_ = ('totype', 'name') + + def __init__(self, totype, name, quals=0): + PointerType.__init__(self, totype, quals) + self.name = name + self.c_name_with_marker = name + '&' + + +class ArrayType(BaseType): + _attrs_ = ('item', 'length') + is_array_type = True + + def __init__(self, item, length): + self.item = item + self.length = length + # + if length is None: + 
brackets = '&[]' + elif length == '...': + brackets = '&[/*...*/]' + else: + brackets = '&[%s]' % length + self.c_name_with_marker = ( + self.item.c_name_with_marker.replace('&', brackets)) + + def length_is_unknown(self): + return isinstance(self.length, str) + + def resolve_length(self, newlength): + return ArrayType(self.item, newlength) + + def build_backend_type(self, ffi, finishlist): + if self.length_is_unknown(): + raise CDefError("cannot render the type %r: unknown length" % + (self,)) + self.item.get_cached_btype(ffi, finishlist) # force the item BType + BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist) + return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length) + +char_array_type = ArrayType(PrimitiveType('char'), None) + + +class StructOrUnionOrEnum(BaseTypeByIdentity): + _attrs_ = ('name',) + forcename = None + + def build_c_name_with_marker(self): + name = self.forcename or '%s %s' % (self.kind, self.name) + self.c_name_with_marker = name + '&' + + def force_the_name(self, forcename): + self.forcename = forcename + self.build_c_name_with_marker() + + def get_official_name(self): + assert self.c_name_with_marker.endswith('&') + return self.c_name_with_marker[:-1] + + +class StructOrUnion(StructOrUnionOrEnum): + fixedlayout = None + completed = 0 + partial = False + packed = 0 + + def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None): + self.name = name + self.fldnames = fldnames + self.fldtypes = fldtypes + self.fldbitsize = fldbitsize + self.fldquals = fldquals + self.build_c_name_with_marker() + + def anonymous_struct_fields(self): + if self.fldtypes is not None: + for name, type in zip(self.fldnames, self.fldtypes): + if name == '' and isinstance(type, StructOrUnion): + yield type + + def enumfields(self, expand_anonymous_struct_union=True): + fldquals = self.fldquals + if fldquals is None: + fldquals = (0,) * len(self.fldnames) + for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes, + self.fldbitsize, fldquals): + if (name == '' and isinstance(type, StructOrUnion) + and expand_anonymous_struct_union): + # nested anonymous struct/union + for result in type.enumfields(): + yield result + else: + yield (name, type, bitsize, quals) + + def force_flatten(self): + # force the struct or union to have a declaration that lists + # directly all fields returned by enumfields(), flattening + # nested anonymous structs/unions. 
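        # Editor's illustrative sketch, not part of upstream cffi: given
        #     struct s { int a; union { int b; float c; }; };
        # enumfields() yields ('a', int, -1, 0), ('b', int, -1, 0) and
        # ('c', float, -1, 0), so after force_flatten() the struct is declared
        # as if it listed the three fields 'a', 'b' and 'c' directly.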
+ names = [] + types = [] + bitsizes = [] + fldquals = [] + for name, type, bitsize, quals in self.enumfields(): + names.append(name) + types.append(type) + bitsizes.append(bitsize) + fldquals.append(quals) + self.fldnames = tuple(names) + self.fldtypes = tuple(types) + self.fldbitsize = tuple(bitsizes) + self.fldquals = tuple(fldquals) + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist, + can_delay) + if not can_delay: + self.finish_backend_type(ffi, finishlist) + return BType + + def finish_backend_type(self, ffi, finishlist): + if self.completed: + if self.completed != 2: + raise NotImplementedError("recursive structure declaration " + "for '%s'" % (self.name,)) + return + BType = ffi._cached_btypes[self] + # + self.completed = 1 + # + if self.fldtypes is None: + pass # not completing it: it's an opaque struct + # + elif self.fixedlayout is None: + fldtypes = [tp.get_cached_btype(ffi, finishlist) + for tp in self.fldtypes] + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize)) + extra_flags = () + if self.packed: + if self.packed == 1: + extra_flags = (8,) # SF_PACKED + else: + extra_flags = (0, self.packed) + ffi._backend.complete_struct_or_union(BType, lst, self, + -1, -1, *extra_flags) + # + else: + fldtypes = [] + fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout + for i in range(len(self.fldnames)): + fsize = fieldsize[i] + ftype = self.fldtypes[i] + # + if isinstance(ftype, ArrayType) and ftype.length_is_unknown(): + # fix the length to match the total size + BItemType = ftype.item.get_cached_btype(ffi, finishlist) + nlen, nrest = divmod(fsize, ffi.sizeof(BItemType)) + if nrest != 0: + self._verification_error( + "field '%s.%s' has a bogus size?" 
% ( + self.name, self.fldnames[i] or '{}')) + ftype = ftype.resolve_length(nlen) + self.fldtypes = (self.fldtypes[:i] + (ftype,) + + self.fldtypes[i+1:]) + # + BFieldType = ftype.get_cached_btype(ffi, finishlist) + if isinstance(ftype, ArrayType) and ftype.length is None: + assert fsize == 0 + else: + bitemsize = ffi.sizeof(BFieldType) + if bitemsize != fsize: + self._verification_error( + "field '%s.%s' is declared as %d bytes, but is " + "really %d bytes" % (self.name, + self.fldnames[i] or '{}', + bitemsize, fsize)) + fldtypes.append(BFieldType) + # + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs)) + ffi._backend.complete_struct_or_union(BType, lst, self, + totalsize, totalalignment) + self.completed = 2 + + def _verification_error(self, msg): + raise VerificationError(msg) + + def check_not_partial(self): + if self.partial and self.fixedlayout is None: + raise VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + finishlist.append(self) + # + return global_cache(self, ffi, 'new_%s_type' % self.kind, + self.get_official_name(), key=self) + + +class StructType(StructOrUnion): + kind = 'struct' + + +class UnionType(StructOrUnion): + kind = 'union' + + +class EnumType(StructOrUnionOrEnum): + kind = 'enum' + partial = False + partial_resolved = False + + def __init__(self, name, enumerators, enumvalues, baseinttype=None): + self.name = name + self.enumerators = enumerators + self.enumvalues = enumvalues + self.baseinttype = baseinttype + self.build_c_name_with_marker() + + def force_the_name(self, forcename): + StructOrUnionOrEnum.force_the_name(self, forcename) + if self.forcename is None: + name = self.get_official_name() + self.forcename = '$' + name.replace(' ', '_') + + def check_not_partial(self): + if self.partial and not self.partial_resolved: + raise VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + base_btype = self.build_baseinttype(ffi, finishlist) + return global_cache(self, ffi, 'new_enum_type', + self.get_official_name(), + self.enumerators, self.enumvalues, + base_btype, key=self) + + def build_baseinttype(self, ffi, finishlist): + if self.baseinttype is not None: + return self.baseinttype.get_cached_btype(ffi, finishlist) + # + if self.enumvalues: + smallest_value = min(self.enumvalues) + largest_value = max(self.enumvalues) + else: + import warnings + try: + # XXX! The goal is to ensure that the warnings.warn() + # will not suppress the warning. We want to get it + # several times if we reach this point several times. 
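                # Editor's note (illustrative, not upstream): Python records
                # each emitted warning in the module-level __warningregistry__
                # and suppresses repeats; clearing it lets the "no values
                # explicitly defined" warning below fire on every call rather
                # than only the first time.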
+ __warningregistry__.clear() + except NameError: + pass + warnings.warn("%r has no values explicitly defined; " + "guessing that it is equivalent to 'unsigned int'" + % self._get_c_name()) + smallest_value = largest_value = 0 + if smallest_value < 0: # needs a signed type + sign = 1 + candidate1 = PrimitiveType("int") + candidate2 = PrimitiveType("long") + else: + sign = 0 + candidate1 = PrimitiveType("unsigned int") + candidate2 = PrimitiveType("unsigned long") + btype1 = candidate1.get_cached_btype(ffi, finishlist) + btype2 = candidate2.get_cached_btype(ffi, finishlist) + size1 = ffi.sizeof(btype1) + size2 = ffi.sizeof(btype2) + if (smallest_value >= ((-1) << (8*size1-1)) and + largest_value < (1 << (8*size1-sign))): + return btype1 + if (smallest_value >= ((-1) << (8*size2-1)) and + largest_value < (1 << (8*size2-sign))): + return btype2 + raise CDefError("%s values don't all fit into either 'long' " + "or 'unsigned long'" % self._get_c_name()) + +def unknown_type(name, structname=None): + if structname is None: + structname = '$%s' % name + tp = StructType(structname, None, None, None) + tp.force_the_name(name) + tp.origin = "unknown_type" + return tp + +def unknown_ptr_type(name, structname=None): + if structname is None: + structname = '$$%s' % name + tp = StructType(structname, None, None, None) + return NamedPointerType(tp, name) + + +global_lock = allocate_lock() +_typecache_cffi_backend = weakref.WeakValueDictionary() + +def get_typecache(backend): + # returns _typecache_cffi_backend if backend is the _cffi_backend + # module, or type(backend).__typecache if backend is an instance of + # CTypesBackend (or some FakeBackend class during tests) + if isinstance(backend, types.ModuleType): + return _typecache_cffi_backend + with global_lock: + if not hasattr(type(backend), '__typecache'): + type(backend).__typecache = weakref.WeakValueDictionary() + return type(backend).__typecache + +def global_cache(srctype, ffi, funcname, *args, **kwds): + key = kwds.pop('key', (funcname, args)) + assert not kwds + try: + return ffi._typecache[key] + except KeyError: + pass + try: + res = getattr(ffi._backend, funcname)(*args) + except NotImplementedError as e: + raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e)) + # note that setdefault() on WeakValueDictionary is not atomic + # and contains a rare bug (http://bugs.python.org/issue19542); + # we have to use a lock and do it ourselves + cache = ffi._typecache + with global_lock: + res1 = cache.get(key) + if res1 is None: + cache[key] = res + return res + else: + return res1 + +def pointer_cache(ffi, BType): + return global_cache('?', ffi, 'new_pointer_type', BType) + +def attach_exception_info(e, name): + if e.args and type(e.args[0]) is str: + e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:] diff --git a/venv/lib/python3.12/site-packages/cffi/parse_c_type.h b/venv/lib/python3.12/site-packages/cffi/parse_c_type.h new file mode 100644 index 00000000..84e4ef85 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/parse_c_type.h @@ -0,0 +1,181 @@ + +/* This part is from file 'cffi/parse_c_type.h'. It is copied at the + beginning of C sources generated by CFFI's ffi.set_source(). 
*/ + +typedef void *_cffi_opcode_t; + +#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8)) +#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode) +#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8) + +#define _CFFI_OP_PRIMITIVE 1 +#define _CFFI_OP_POINTER 3 +#define _CFFI_OP_ARRAY 5 +#define _CFFI_OP_OPEN_ARRAY 7 +#define _CFFI_OP_STRUCT_UNION 9 +#define _CFFI_OP_ENUM 11 +#define _CFFI_OP_FUNCTION 13 +#define _CFFI_OP_FUNCTION_END 15 +#define _CFFI_OP_NOOP 17 +#define _CFFI_OP_BITFIELD 19 +#define _CFFI_OP_TYPENAME 21 +#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs +#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs +#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. a single arg) +#define _CFFI_OP_CONSTANT 29 +#define _CFFI_OP_CONSTANT_INT 31 +#define _CFFI_OP_GLOBAL_VAR 33 +#define _CFFI_OP_DLOPEN_FUNC 35 +#define _CFFI_OP_DLOPEN_CONST 37 +#define _CFFI_OP_GLOBAL_VAR_F 39 +#define _CFFI_OP_EXTERN_PYTHON 41 + +#define _CFFI_PRIM_VOID 0 +#define _CFFI_PRIM_BOOL 1 +#define _CFFI_PRIM_CHAR 2 +#define _CFFI_PRIM_SCHAR 3 +#define _CFFI_PRIM_UCHAR 4 +#define _CFFI_PRIM_SHORT 5 +#define _CFFI_PRIM_USHORT 6 +#define _CFFI_PRIM_INT 7 +#define _CFFI_PRIM_UINT 8 +#define _CFFI_PRIM_LONG 9 +#define _CFFI_PRIM_ULONG 10 +#define _CFFI_PRIM_LONGLONG 11 +#define _CFFI_PRIM_ULONGLONG 12 +#define _CFFI_PRIM_FLOAT 13 +#define _CFFI_PRIM_DOUBLE 14 +#define _CFFI_PRIM_LONGDOUBLE 15 + +#define _CFFI_PRIM_WCHAR 16 +#define _CFFI_PRIM_INT8 17 +#define _CFFI_PRIM_UINT8 18 +#define _CFFI_PRIM_INT16 19 +#define _CFFI_PRIM_UINT16 20 +#define _CFFI_PRIM_INT32 21 +#define _CFFI_PRIM_UINT32 22 +#define _CFFI_PRIM_INT64 23 +#define _CFFI_PRIM_UINT64 24 +#define _CFFI_PRIM_INTPTR 25 +#define _CFFI_PRIM_UINTPTR 26 +#define _CFFI_PRIM_PTRDIFF 27 +#define _CFFI_PRIM_SIZE 28 +#define _CFFI_PRIM_SSIZE 29 +#define _CFFI_PRIM_INT_LEAST8 30 +#define _CFFI_PRIM_UINT_LEAST8 31 +#define _CFFI_PRIM_INT_LEAST16 32 +#define _CFFI_PRIM_UINT_LEAST16 33 +#define _CFFI_PRIM_INT_LEAST32 34 +#define _CFFI_PRIM_UINT_LEAST32 35 +#define _CFFI_PRIM_INT_LEAST64 36 +#define _CFFI_PRIM_UINT_LEAST64 37 +#define _CFFI_PRIM_INT_FAST8 38 +#define _CFFI_PRIM_UINT_FAST8 39 +#define _CFFI_PRIM_INT_FAST16 40 +#define _CFFI_PRIM_UINT_FAST16 41 +#define _CFFI_PRIM_INT_FAST32 42 +#define _CFFI_PRIM_UINT_FAST32 43 +#define _CFFI_PRIM_INT_FAST64 44 +#define _CFFI_PRIM_UINT_FAST64 45 +#define _CFFI_PRIM_INTMAX 46 +#define _CFFI_PRIM_UINTMAX 47 +#define _CFFI_PRIM_FLOATCOMPLEX 48 +#define _CFFI_PRIM_DOUBLECOMPLEX 49 +#define _CFFI_PRIM_CHAR16 50 +#define _CFFI_PRIM_CHAR32 51 + +#define _CFFI__NUM_PRIM 52 +#define _CFFI__UNKNOWN_PRIM (-1) +#define _CFFI__UNKNOWN_FLOAT_PRIM (-2) +#define _CFFI__UNKNOWN_LONG_DOUBLE (-3) + +#define _CFFI__IO_FILE_STRUCT (-1) + + +struct _cffi_global_s { + const char *name; + void *address; + _cffi_opcode_t type_op; + void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown + // OP_CPYTHON_BLTN_*: addr of direct function +}; + +struct _cffi_getconst_s { + unsigned long long value; + const struct _cffi_type_context_s *ctx; + int gindex; +}; + +struct _cffi_struct_union_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_STRUCT_UNION + int flags; // _CFFI_F_* flags below + size_t size; + int alignment; + int first_field_index; // -> _cffi_fields array + int num_fields; +}; +#define _CFFI_F_UNION 0x01 // is a union, not a struct +#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the + // "standard layout" or if some are missing +#define 
_CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct +#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include() +#define _CFFI_F_OPAQUE 0x10 // opaque + +struct _cffi_field_s { + const char *name; + size_t field_offset; + size_t field_size; + _cffi_opcode_t field_type_op; +}; + +struct _cffi_enum_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_ENUM + int type_prim; // _CFFI_PRIM_xxx + const char *enumerators; // comma-delimited string +}; + +struct _cffi_typename_s { + const char *name; + int type_index; /* if opaque, points to a possibly artificial + OP_STRUCT which is itself opaque */ +}; + +struct _cffi_type_context_s { + _cffi_opcode_t *types; + const struct _cffi_global_s *globals; + const struct _cffi_field_s *fields; + const struct _cffi_struct_union_s *struct_unions; + const struct _cffi_enum_s *enums; + const struct _cffi_typename_s *typenames; + int num_globals; + int num_struct_unions; + int num_enums; + int num_typenames; + const char *const *includes; + int num_types; + int flags; /* future extension */ +}; + +struct _cffi_parse_info_s { + const struct _cffi_type_context_s *ctx; + _cffi_opcode_t *output; + unsigned int output_size; + size_t error_location; + const char *error_message; +}; + +struct _cffi_externpy_s { + const char *name; + size_t size_of_result; + void *reserved1, *reserved2; +}; + +#ifdef _CFFI_INTERNAL +static int parse_c_type(struct _cffi_parse_info_s *info, const char *input); +static int search_in_globals(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +static int search_in_struct_unions(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +#endif diff --git a/venv/lib/python3.12/site-packages/cffi/pkgconfig.py b/venv/lib/python3.12/site-packages/cffi/pkgconfig.py new file mode 100644 index 00000000..5c93f15a --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/pkgconfig.py @@ -0,0 +1,121 @@ +# pkg-config, https://www.freedesktop.org/wiki/Software/pkg-config/ integration for cffi +import sys, os, subprocess + +from .error import PkgConfigError + + +def merge_flags(cfg1, cfg2): + """Merge values from cffi config flags cfg2 to cf1 + + Example: + merge_flags({"libraries": ["one"]}, {"libraries": ["two"]}) + {"libraries": ["one", "two"]} + """ + for key, value in cfg2.items(): + if key not in cfg1: + cfg1[key] = value + else: + if not isinstance(cfg1[key], list): + raise TypeError("cfg1[%r] should be a list of strings" % (key,)) + if not isinstance(value, list): + raise TypeError("cfg2[%r] should be a list of strings" % (key,)) + cfg1[key].extend(value) + return cfg1 + + +def call(libname, flag, encoding=sys.getfilesystemencoding()): + """Calls pkg-config and returns the output if found + """ + a = ["pkg-config", "--print-errors"] + a.append(flag) + a.append(libname) + try: + pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + except EnvironmentError as e: + raise PkgConfigError("cannot run pkg-config: %s" % (str(e).strip(),)) + + bout, berr = pc.communicate() + if pc.returncode != 0: + try: + berr = berr.decode(encoding) + except Exception: + pass + raise PkgConfigError(berr.strip()) + + if sys.version_info >= (3,) and not isinstance(bout, str): # Python 3.x + try: + bout = bout.decode(encoding) + except UnicodeDecodeError: + raise PkgConfigError("pkg-config %s %s returned bytes that cannot " + "be decoded with encoding %r:\n%r" % + (flag, libname, encoding, bout)) + + if os.altsep != '\\' and '\\' in bout: + raise 
PkgConfigError("pkg-config %s %s returned an unsupported " + "backslash-escaped output:\n%r" % + (flag, libname, bout)) + return bout + + +def flags_from_pkgconfig(libs): + r"""Return compiler line flags for FFI.set_source based on pkg-config output + + Usage + ... + ffibuilder.set_source("_foo", pkgconfig = ["libfoo", "libbar >= 1.8.3"]) + + If pkg-config is installed on build machine, then arguments include_dirs, + library_dirs, libraries, define_macros, extra_compile_args and + extra_link_args are extended with an output of pkg-config for libfoo and + libbar. + + Raises PkgConfigError in case the pkg-config call fails. + """ + + def get_include_dirs(string): + return [x[2:] for x in string.split() if x.startswith("-I")] + + def get_library_dirs(string): + return [x[2:] for x in string.split() if x.startswith("-L")] + + def get_libraries(string): + return [x[2:] for x in string.split() if x.startswith("-l")] + + # convert -Dfoo=bar to list of tuples [("foo", "bar")] expected by distutils + def get_macros(string): + def _macro(x): + x = x[2:] # drop "-D" + if '=' in x: + return tuple(x.split("=", 1)) # "-Dfoo=bar" => ("foo", "bar") + else: + return (x, None) # "-Dfoo" => ("foo", None) + return [_macro(x) for x in string.split() if x.startswith("-D")] + + def get_other_cflags(string): + return [x for x in string.split() if not x.startswith("-I") and + not x.startswith("-D")] + + def get_other_libs(string): + return [x for x in string.split() if not x.startswith("-L") and + not x.startswith("-l")] + + # return kwargs for given libname + def kwargs(libname): + fse = sys.getfilesystemencoding() + all_cflags = call(libname, "--cflags") + all_libs = call(libname, "--libs") + return { + "include_dirs": get_include_dirs(all_cflags), + "library_dirs": get_library_dirs(all_libs), + "libraries": get_libraries(all_libs), + "define_macros": get_macros(all_cflags), + "extra_compile_args": get_other_cflags(all_cflags), + "extra_link_args": get_other_libs(all_libs), + } + + # merge all arguments together + ret = {} + for libname in libs: + lib_flags = kwargs(libname) + merge_flags(ret, lib_flags) + return ret diff --git a/venv/lib/python3.12/site-packages/cffi/recompiler.py b/venv/lib/python3.12/site-packages/cffi/recompiler.py new file mode 100644 index 00000000..7734a348 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/recompiler.py @@ -0,0 +1,1598 @@ +import io, os, sys, sysconfig +from . 
import ffiplatform, model +from .error import VerificationError +from .cffi_opcode import * + +VERSION_BASE = 0x2601 +VERSION_EMBEDDED = 0x2701 +VERSION_CHAR16CHAR32 = 0x2801 + +USE_LIMITED_API = ((sys.platform != 'win32' or sys.version_info < (3, 0) or + sys.version_info >= (3, 5)) and + not sysconfig.get_config_var("Py_GIL_DISABLED")) # free-threaded doesn't yet support limited API + +class GlobalExpr: + def __init__(self, name, address, type_op, size=0, check_value=0): + self.name = name + self.address = address + self.type_op = type_op + self.size = size + self.check_value = check_value + + def as_c_expr(self): + return ' { "%s", (void *)%s, %s, (void *)%s },' % ( + self.name, self.address, self.type_op.as_c_expr(), self.size) + + def as_python_expr(self): + return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name, + self.check_value) + +class FieldExpr: + def __init__(self, name, field_offset, field_size, fbitsize, field_type_op): + self.name = name + self.field_offset = field_offset + self.field_size = field_size + self.fbitsize = fbitsize + self.field_type_op = field_type_op + + def as_c_expr(self): + spaces = " " * len(self.name) + return (' { "%s", %s,\n' % (self.name, self.field_offset) + + ' %s %s,\n' % (spaces, self.field_size) + + ' %s %s },' % (spaces, self.field_type_op.as_c_expr())) + + def as_python_expr(self): + raise NotImplementedError + + def as_field_python_expr(self): + if self.field_type_op.op == OP_NOOP: + size_expr = '' + elif self.field_type_op.op == OP_BITFIELD: + size_expr = format_four_bytes(self.fbitsize) + else: + raise NotImplementedError + return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(), + size_expr, + self.name) + +class StructUnionExpr: + def __init__(self, name, type_index, flags, size, alignment, comment, + first_field_index, c_fields): + self.name = name + self.type_index = type_index + self.flags = flags + self.size = size + self.alignment = alignment + self.comment = comment + self.first_field_index = first_field_index + self.c_fields = c_fields + + def as_c_expr(self): + return (' { "%s", %d, %s,' % (self.name, self.type_index, self.flags) + + '\n %s, %s, ' % (self.size, self.alignment) + + '%d, %d ' % (self.first_field_index, len(self.c_fields)) + + ('/* %s */ ' % self.comment if self.comment else '') + + '},') + + def as_python_expr(self): + flags = eval(self.flags, G_FLAGS) + fields_expr = [c_field.as_field_python_expr() + for c_field in self.c_fields] + return "(b'%s%s%s',%s)" % ( + format_four_bytes(self.type_index), + format_four_bytes(flags), + self.name, + ','.join(fields_expr)) + +class EnumExpr: + def __init__(self, name, type_index, size, signed, allenums): + self.name = name + self.type_index = type_index + self.size = size + self.signed = signed + self.allenums = allenums + + def as_c_expr(self): + return (' { "%s", %d, _cffi_prim_int(%s, %s),\n' + ' "%s" },' % (self.name, self.type_index, + self.size, self.signed, self.allenums)) + + def as_python_expr(self): + prim_index = { + (1, 0): PRIM_UINT8, (1, 1): PRIM_INT8, + (2, 0): PRIM_UINT16, (2, 1): PRIM_INT16, + (4, 0): PRIM_UINT32, (4, 1): PRIM_INT32, + (8, 0): PRIM_UINT64, (8, 1): PRIM_INT64, + }[self.size, self.signed] + return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index), + format_four_bytes(prim_index), + self.name, self.allenums) + +class TypenameExpr: + def __init__(self, name, type_index): + self.name = name + self.type_index = type_index + + def as_c_expr(self): + return ' { "%s", %d },' % (self.name, self.type_index) + + def as_python_expr(self): 
+ return "b'%s%s'" % (format_four_bytes(self.type_index), self.name) + + +# ____________________________________________________________ + + +class Recompiler: + _num_externpy = 0 + + def __init__(self, ffi, module_name, target_is_python=False): + self.ffi = ffi + self.module_name = module_name + self.target_is_python = target_is_python + self._version = VERSION_BASE + + def needs_version(self, ver): + self._version = max(self._version, ver) + + def collect_type_table(self): + self._typesdict = {} + self._generate("collecttype") + # + all_decls = sorted(self._typesdict, key=str) + # + # prepare all FUNCTION bytecode sequences first + self.cffi_types = [] + for tp in all_decls: + if tp.is_raw_function: + assert self._typesdict[tp] is None + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + for tp1 in tp.args: + assert isinstance(tp1, (model.VoidType, + model.BasePrimitiveType, + model.PointerType, + model.StructOrUnionOrEnum, + model.FunctionPtrType)) + if self._typesdict[tp1] is None: + self._typesdict[tp1] = len(self.cffi_types) + self.cffi_types.append(tp1) # placeholder + self.cffi_types.append('END') # placeholder + # + # prepare all OTHER bytecode sequences + for tp in all_decls: + if not tp.is_raw_function and self._typesdict[tp] is None: + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + if tp.is_array_type and tp.length is not None: + self.cffi_types.append('LEN') # placeholder + assert None not in self._typesdict.values() + # + # collect all structs and unions and enums + self._struct_unions = {} + self._enums = {} + for tp in all_decls: + if isinstance(tp, model.StructOrUnion): + self._struct_unions[tp] = None + elif isinstance(tp, model.EnumType): + self._enums[tp] = None + for i, tp in enumerate(sorted(self._struct_unions, + key=lambda tp: tp.name)): + self._struct_unions[tp] = i + for i, tp in enumerate(sorted(self._enums, + key=lambda tp: tp.name)): + self._enums[tp] = i + # + # emit all bytecode sequences now + for tp in all_decls: + method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__) + method(tp, self._typesdict[tp]) + # + # consistency check + for op in self.cffi_types: + assert isinstance(op, CffiOp) + self.cffi_types = tuple(self.cffi_types) # don't change any more + + def _enum_fields(self, tp): + # When producing C, expand all anonymous struct/union fields. + # That's necessary to have C code checking the offsets of the + # individual fields contained in them. When producing Python, + # don't do it and instead write it like it is, with the + # corresponding fields having an empty name. Empty names are + # recognized at runtime when we import the generated Python + # file. 
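        # Editor's illustrative sketch, not part of upstream cffi: for
        #     struct s { int a; union { int b; float c; }; };
        # the C (API-mode) output lists 'a', 'b' and 'c' so the generated
        # checking code can verify each field's offset, while the Python
        # (ABI-mode) output keeps the anonymous union as a single field
        # with an empty name.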
+ expand_anonymous_struct_union = not self.target_is_python + return tp.enumfields(expand_anonymous_struct_union) + + def _do_collect_type(self, tp): + if not isinstance(tp, model.BaseTypeByIdentity): + if isinstance(tp, tuple): + for x in tp: + self._do_collect_type(x) + return + if tp not in self._typesdict: + self._typesdict[tp] = None + if isinstance(tp, model.FunctionPtrType): + self._do_collect_type(tp.as_raw_function()) + elif isinstance(tp, model.StructOrUnion): + if tp.fldtypes is not None and ( + tp not in self.ffi._parser._included_declarations): + for name1, tp1, _, _ in self._enum_fields(tp): + self._do_collect_type(self._field_type(tp, name1, tp1)) + else: + for _, x in tp._get_items(): + self._do_collect_type(x) + + def _generate(self, step_name): + lst = self.ffi._parser._declarations.items() + for name, (tp, quals) in sorted(lst): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in recompile(): %r" % name) + try: + self._current_quals = quals + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + # ---------- + + ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"] + + def collect_step_tables(self): + # collect the declarations for '_cffi_globals', '_cffi_typenames', etc. + self._lsts = {} + for step_name in self.ALL_STEPS: + self._lsts[step_name] = [] + self._seen_struct_unions = set() + self._generate("ctx") + self._add_missing_struct_unions() + # + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if step_name != "field": + lst.sort(key=lambda entry: entry.name) + self._lsts[step_name] = tuple(lst) # don't change any more + # + # check for a possible internal inconsistency: _cffi_struct_unions + # should have been generated with exactly self._struct_unions + lst = self._lsts["struct_union"] + for tp, i in self._struct_unions.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._struct_unions) + # same with enums + lst = self._lsts["enum"] + for tp, i in self._enums.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._enums) + + # ---------- + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self, f, preamble): + if self.target_is_python: + assert preamble is None + self.write_py_source_to_f(f) + else: + assert preamble is not None + self.write_c_source_to_f(f, preamble) + + def _rel_readlines(self, filename): + g = open(os.path.join(os.path.dirname(__file__), filename), 'r') + lines = g.readlines() + g.close() + return lines + + def write_c_source_to_f(self, f, preamble): + self._f = f + prnt = self._prnt + if self.ffi._embedding is not None: + prnt('#define _CFFI_USE_EMBEDDING') + if not USE_LIMITED_API: + prnt('#define _CFFI_NO_LIMITED_API') + # + # first the '#include' (actually done by inlining the file's content) + lines = self._rel_readlines('_cffi_include.h') + i = lines.index('#include "parse_c_type.h"\n') + lines[i:i+1] = self._rel_readlines('parse_c_type.h') + prnt(''.join(lines)) + # + # if we have ffi._embedding != None, we give it here as a macro + # and include an extra file + base_module_name = self.module_name.split('.')[-1] + if self.ffi._embedding is not None: + prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,)) + prnt('static const char _CFFI_PYTHON_STARTUP_CODE[] = {') + 
self._print_string_literal_in_array(self.ffi._embedding) + prnt('0 };') + prnt('#ifdef PYPY_VERSION') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % ( + base_module_name,)) + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % ( + base_module_name,)) + prnt('#else') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % ( + base_module_name,)) + prnt('#endif') + lines = self._rel_readlines('_embedding.h') + i = lines.index('#include "_cffi_errors.h"\n') + lines[i:i+1] = self._rel_readlines('_cffi_errors.h') + prnt(''.join(lines)) + self.needs_version(VERSION_EMBEDDED) + # + # then paste the C source given by the user, verbatim. + prnt('/************************************************************/') + prnt() + prnt(preamble) + prnt() + prnt('/************************************************************/') + prnt() + # + # the declaration of '_cffi_types' + prnt('static void *_cffi_types[] = {') + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + for i, op in enumerate(self.cffi_types): + comment = '' + if i in typeindex2type: + comment = ' // ' + typeindex2type[i]._get_c_name() + prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment)) + if not self.cffi_types: + prnt(' 0') + prnt('};') + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._seen_constants = set() + self._generate("decl") + # + # the declaration of '_cffi_globals' and '_cffi_typenames' + nums = {} + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + nums[step_name] = len(lst) + if nums[step_name] > 0: + prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % ( + step_name, step_name)) + for entry in lst: + prnt(entry.as_c_expr()) + prnt('};') + prnt() + # + # the declaration of '_cffi_includes' + if self.ffi._included_ffis: + prnt('static const char * const _cffi_includes[] = {') + for ffi_to_include in self.ffi._included_ffis: + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is None: + raise VerificationError( + "not implemented yet: ffi.include() of a Python-based " + "ffi inside a C-based ffi") + prnt(' "%s",' % (included_module_name,)) + prnt(' NULL') + prnt('};') + prnt() + # + # the declaration of '_cffi_type_context' + prnt('static const struct _cffi_type_context_s _cffi_type_context = {') + prnt(' _cffi_types,') + for step_name in self.ALL_STEPS: + if nums[step_name] > 0: + prnt(' _cffi_%ss,' % step_name) + else: + prnt(' NULL, /* no %ss */' % step_name) + for step_name in self.ALL_STEPS: + if step_name != "field": + prnt(' %d, /* num_%ss */' % (nums[step_name], step_name)) + if self.ffi._included_ffis: + prnt(' _cffi_includes,') + else: + prnt(' NULL, /* no includes */') + prnt(' %d, /* num_types */' % (len(self.cffi_types),)) + flags = 0 + if self._num_externpy > 0 or self.ffi._embedding is not None: + flags |= 1 # set to mean that we use extern "Python" + prnt(' %d, /* flags */' % flags) + prnt('};') + prnt() + # + # the init function + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility push(default) /* for -fvisibility= */') + prnt('#endif') + prnt() + prnt('#ifdef PYPY_VERSION') + prnt('PyMODINIT_FUNC') + prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,)) + prnt('{') + if flags & 
1: + prnt(' if (((intptr_t)p[0]) >= 0x0A03) {') + prnt(' _cffi_call_python_org = ' + '(void(*)(struct _cffi_externpy_s *, char *))p[1];') + prnt(' }') + prnt(' p[0] = (const void *)0x%x;' % self._version) + prnt(' p[1] = &_cffi_type_context;') + prnt('#if PY_MAJOR_VERSION >= 3') + prnt(' return NULL;') + prnt('#endif') + prnt('}') + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + prnt('# ifdef _MSC_VER') + prnt(' PyMODINIT_FUNC') + prnt('# if PY_MAJOR_VERSION >= 3') + prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,)) + prnt('# else') + prnt(' init%s(void) { }' % (base_module_name,)) + prnt('# endif') + prnt('# endif') + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % (base_module_name,)) + prnt('{') + prnt(' return _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#else') + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % (base_module_name,)) + prnt('{') + prnt(' _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#endif') + prnt() + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility pop') + prnt('#endif') + self._version = None + + def _to_py(self, x): + if isinstance(x, str): + return "b'%s'" % (x,) + if isinstance(x, (list, tuple)): + rep = [self._to_py(item) for item in x] + if len(rep) == 1: + rep.append('') + return "(%s)" % (','.join(rep),) + return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp. + + def write_py_source_to_f(self, f): + self._f = f + prnt = self._prnt + # + # header + prnt("# auto-generated file") + prnt("import _cffi_backend") + # + # the 'import' of the included ffis + num_includes = len(self.ffi._included_ffis or ()) + for i in range(num_includes): + ffi_to_include = self.ffi._included_ffis[i] + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is not None: + raise VerificationError( + "not implemented yet: ffi.include() of a C-based " + "ffi inside a Python-based ffi") + prnt('from %s import ffi as _ffi%d' % (included_module_name, i)) + prnt() + prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,)) + prnt(" _version = 0x%x," % (self._version,)) + self._version = None + # + # the '_types' keyword argument + self.cffi_types = tuple(self.cffi_types) # don't change any more + types_lst = [op.as_python_bytes() for op in self.cffi_types] + prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),)) + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + # + # the keyword arguments from ALL_STEPS + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if len(lst) > 0 and step_name != "field": + prnt(' _%ss = %s,' % (step_name, self._to_py(lst))) + # + # the '_includes' keyword argument + if num_includes > 0: + prnt(' _includes = (%s,),' % ( + ', '.join(['_ffi%d' % i for i in range(num_includes)]),)) + # + # the footer + prnt(')') + + # ---------- + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! 
:-) + return self._typesdict[type] + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.BasePrimitiveType) and not tp.is_complex_type(): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + elif isinstance(tp, model.UnknownFloatType): + # don't check with is_float_type(): it may be a 'long + # double' here, and _cffi_to_c_double would loose precision + converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),) + else: + cname = tp.get_c_name('') + converter = '(%s)_cffi_to_c_%s' % (cname, + tp.name.replace(' ', '_')) + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif (isinstance(tp, model.StructOrUnionOrEnum) or + isinstance(tp, model.BasePrimitiveType)): + # a struct (not a struct pointer) as a function argument; + # or, a complex (the same code works) + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars, freelines): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + localvars.add('struct _cffi_freeme_s *large_args_free = NULL') + freelines.add('if (large_args_free != NULL)' + ' _cffi_free_array_arguments(large_args_free);') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' %s = ((size_t)datasize) <= 640 ? 
' + '(%s)alloca((size_t)datasize) : NULL;' % ( + tovar, tp.get_c_name(''))) + self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' + '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) + self._prnt(' datasize, &large_args_free) < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.BasePrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif isinstance(tp, model.UnknownFloatType): + return '_cffi_from_c_double(%s)' % (var,) + elif tp.name != 'long double' and not tp.is_complex_type(): + cname = tp.name.replace(' ', '_') + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + return '_cffi_from_c_%s(%s)' % (cname, var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs + + def _typedef_type(self, tp, name): + return self._global_type(tp, "(*(%s *)0)" % (name,)) + + def _generate_cpy_typedef_collecttype(self, tp, name): + self._do_collect_type(self._typedef_type(tp, name)) + + def _generate_cpy_typedef_decl(self, tp, name): + pass + + def _typedef_ctx(self, tp, name): + type_index = self._typesdict[tp] + self._lsts["typename"].append(TypenameExpr(name, type_index)) + + def _generate_cpy_typedef_ctx(self, tp, name): + tp = self._typedef_type(tp, name) + self._typedef_ctx(tp, name) + if getattr(tp, "origin", None) == "unknown_type": + self._struct_ctx(tp, tp.name, approxname=None) + elif isinstance(tp, model.NamedPointerType): + self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name, + named_ptr=tp) + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + self._do_collect_type(tp.as_raw_function()) + if tp.ellipsis and not self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_function_decl(self, tp, name): + assert not self.target_is_python + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_constant_decl(tp, name) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + # + # ------------------------------ + # the 'd' version of the function, only for addressof(lib, 'func') + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arguments.append(type.get_c_name(' x%d' % i, context)) + call_arguments.append('x%d' % i) + repr_arguments = ', 
'.join(arguments) + repr_arguments = repr_arguments or 'void' + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, name, repr_arguments) + prnt('static %s' % (tp.result.get_c_name(name_and_arguments),)) + prnt('{') + call_arguments = ', '.join(call_arguments) + result_code = 'return ' + if isinstance(tp.result, model.VoidType): + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, call_arguments)) + prnt('}') + # + prnt('#ifndef PYPY_VERSION') # ------------------------------ + # + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' x%d' % i, context) + prnt(' %s;' % arg) + # + localvars = set() + freelines = set() + for type in tp.args: + self._extra_local_variables(type, localvars, freelines) + for decl in sorted(localvars): + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + result_decl = ' %s;' % tp.result.get_c_name(' result', context) + prnt(result_decl) + prnt(' PyObject *pyresult;') + else: + result_decl = None + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_UnpackTuple(args, "%s", %d, %d, %s))' % ( + name, len(rng), len(rng), + ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + call_arguments = ['x%d' % i for i in range(len(tp.args))] + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' pyresult = %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + for freeline in freelines: + prnt(' ' + freeline) + prnt(' return pyresult;') + else: + for freeline in freelines: + prnt(' ' + freeline) + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + # + prnt('#else') # ------------------------------ + # + # the PyPy version: need to replace struct/union arguments with + # pointers, and if the result is a struct/union, insert a first + # arg that is a pointer to the result. We also do that for + # complex args and return type. 
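        # Editor's illustrative sketch, not part of upstream cffi: for
        #     struct point f(struct point p);
        # the code below emits, on PyPy,
        #     static void _cffi_f_f(struct point *result, struct point *x0)
        #     { *result = f(*x0); }
        # so struct values travel through pointers, as selected by
        # need_indirection().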
+ def need_indirection(type): + return (isinstance(type, model.StructOrUnion) or + (isinstance(type, model.PrimitiveType) and + type.is_complex_type())) + difference = False + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + indirection = '' + if need_indirection(type): + indirection = '*' + difference = True + arg = type.get_c_name(' %sx%d' % (indirection, i), context) + arguments.append(arg) + call_arguments.append('%sx%d' % (indirection, i)) + tp_result = tp.result + if need_indirection(tp_result): + context = 'result of %s' % name + arg = tp_result.get_c_name(' *result', context) + arguments.insert(0, arg) + tp_result = model.void_type + result_decl = None + result_code = '*result = ' + difference = True + if difference: + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name, + repr_arguments) + prnt('static %s' % (tp_result.get_c_name(name_and_arguments),)) + prnt('{') + if result_decl: + prnt(result_decl) + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + if result_decl: + prnt(' return result;') + prnt('}') + else: + prnt('# define _cffi_f_%s _cffi_d_%s' % (name, name)) + # + prnt('#endif') # ------------------------------ + prnt() + + def _generate_cpy_function_ctx(self, tp, name): + if tp.ellipsis and not self.target_is_python: + self._generate_cpy_constant_ctx(tp, name) + return + type_index = self._typesdict[tp.as_raw_function()] + numargs = len(tp.args) + if self.target_is_python: + meth_kind = OP_DLOPEN_FUNC + elif numargs == 0: + meth_kind = OP_CPYTHON_BLTN_N # 'METH_NOARGS' + elif numargs == 1: + meth_kind = OP_CPYTHON_BLTN_O # 'METH_O' + else: + meth_kind = OP_CPYTHON_BLTN_V # 'METH_VARARGS' + self._lsts["global"].append( + GlobalExpr(name, '_cffi_f_%s' % name, + CffiOp(meth_kind, type_index), + size='_cffi_d_%s' % name)) + + # ---------- + # named structs or unions + + def _field_type(self, tp_struct, field_name, tp_field): + if isinstance(tp_field, model.ArrayType): + actual_length = tp_field.length + if actual_length == '...': + ptr_struct_name = tp_struct.get_c_name('*') + actual_length = '_cffi_array_len(((%s)0)->%s)' % ( + ptr_struct_name, field_name) + tp_item = self._field_type(tp_struct, '%s[0]' % field_name, + tp_field.item) + tp_field = model.ArrayType(tp_item, actual_length) + return tp_field + + def _struct_collecttype(self, tp): + self._do_collect_type(tp) + if self.target_is_python: + # also requires nested anon struct/unions in ABI mode, recursively + for fldtype in tp.anonymous_struct_fields(): + self._struct_collecttype(fldtype) + + def _struct_decl(self, tp, cname, approxname): + if tp.fldtypes is None: + return + prnt = self._prnt + checkfuncname = '_cffi_checkfld_%s' % (approxname,) + prnt('_CFFI_UNUSED_FN') + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in self._enum_fields(tp): + try: + if ftype.is_integer_type() or fbitsize >= 0: + # accept all integers, but complain on float or double + if fname != '': + prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is " + "an integer */" % (fname, cname, fname)) + continue + # only accept exactly the type declared, except that '[]' + # is interpreted as a '*' and so will match any array length. + # (It would also match '*', but that's harder to detect...) 
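                # Editor's illustrative sketch, not part of upstream cffi: for
                # a cdef field declared  int data[];  (or  int data[...];  )
                # the loop below emits  { int *tmp = &p->data[0]; (void)tmp; }
                # so any concrete array length in the real C struct compiles.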
+ while (isinstance(ftype, model.ArrayType) + and (ftype.length is None or ftype.length == '...')): + ftype = ftype.item + fname = fname + '[0]' + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname)) + prnt() + + def _struct_ctx(self, tp, cname, approxname, named_ptr=None): + type_index = self._typesdict[tp] + reason_for_not_expanding = None + flags = [] + if isinstance(tp, model.UnionType): + flags.append("_CFFI_F_UNION") + if tp.fldtypes is None: + flags.append("_CFFI_F_OPAQUE") + reason_for_not_expanding = "opaque" + if (tp not in self.ffi._parser._included_declarations and + (named_ptr is None or + named_ptr not in self.ffi._parser._included_declarations)): + if tp.fldtypes is None: + pass # opaque + elif tp.partial or any(tp.anonymous_struct_fields()): + pass # field layout obtained silently from the C compiler + else: + flags.append("_CFFI_F_CHECK_FIELDS") + if tp.packed: + if tp.packed > 1: + raise NotImplementedError( + "%r is declared with 'pack=%r'; only 0 or 1 are " + "supported in API mode (try to use \"...;\", which " + "does not require a 'pack' declaration)" % + (tp, tp.packed)) + flags.append("_CFFI_F_PACKED") + else: + flags.append("_CFFI_F_EXTERNAL") + reason_for_not_expanding = "external" + flags = '|'.join(flags) or '0' + c_fields = [] + if reason_for_not_expanding is None: + enumfields = list(self._enum_fields(tp)) + for fldname, fldtype, fbitsize, fqual in enumfields: + fldtype = self._field_type(tp, fldname, fldtype) + self._check_not_opaque(fldtype, + "field '%s.%s'" % (tp.name, fldname)) + # cname is None for _add_missing_struct_unions() only + op = OP_NOOP + if fbitsize >= 0: + op = OP_BITFIELD + size = '%d /* bits */' % fbitsize + elif cname is None or ( + isinstance(fldtype, model.ArrayType) and + fldtype.length is None): + size = '(size_t)-1' + else: + size = 'sizeof(((%s)0)->%s)' % ( + tp.get_c_name('*') if named_ptr is None + else named_ptr.name, + fldname) + if cname is None or fbitsize >= 0: + offset = '(size_t)-1' + elif named_ptr is not None: + offset = '(size_t)(((char *)&((%s)4096)->%s) - (char *)4096)' % ( + named_ptr.name, fldname) + else: + offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname) + c_fields.append( + FieldExpr(fldname, offset, size, fbitsize, + CffiOp(op, self._typesdict[fldtype]))) + first_field_index = len(self._lsts["field"]) + self._lsts["field"].extend(c_fields) + # + if cname is None: # unknown name, for _add_missing_struct_unions + size = '(size_t)-2' + align = -2 + comment = "unnamed" + else: + if named_ptr is not None: + size = 'sizeof(*(%s)0)' % (named_ptr.name,) + align = '-1 /* unknown alignment */' + else: + size = 'sizeof(%s)' % (cname,) + align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,) + comment = None + else: + size = '(size_t)-1' + align = -1 + first_field_index = -1 + comment = reason_for_not_expanding + self._lsts["struct_union"].append( + StructUnionExpr(tp.name, type_index, flags, size, align, comment, + first_field_index, c_fields)) + self._seen_struct_unions.add(tp) + + def _check_not_opaque(self, tp, location): + while isinstance(tp, model.ArrayType): + tp = tp.item + if isinstance(tp, model.StructOrUnion) and tp.fldtypes is None: + raise TypeError( + "%s is of an opaque type (not declared in cdef())" % location) + + def _add_missing_struct_unions(self): + # not very 
nice, but some struct declarations might be missing + # because they don't have any known C name. Check that they are + # not partial (we can't complete or verify them!) and emit them + # anonymously. + lst = list(self._struct_unions.items()) + lst.sort(key=lambda tp_order: tp_order[1]) + for tp, order in lst: + if tp not in self._seen_struct_unions: + if tp.partial: + raise NotImplementedError("internal inconsistency: %r is " + "partial but was not seen at " + "this point" % (tp,)) + if tp.name.startswith('$') and tp.name[1:].isdigit(): + approxname = tp.name[1:] + elif tp.name == '_IO_FILE' and tp.forcename == 'FILE': + approxname = 'FILE' + self._typedef_ctx(tp, 'FILE') + else: + raise NotImplementedError("internal inconsistency: %r" % + (tp,)) + self._struct_ctx(tp, None, approxname) + + def _generate_cpy_struct_collecttype(self, tp, name): + self._struct_collecttype(tp) + _generate_cpy_union_collecttype = _generate_cpy_struct_collecttype + + def _struct_names(self, tp): + cname = tp.get_c_name('') + if ' ' in cname: + return cname, cname.replace(' ', '_') + else: + return cname, '_' + cname + + def _generate_cpy_struct_decl(self, tp, name): + self._struct_decl(tp, *self._struct_names(tp)) + _generate_cpy_union_decl = _generate_cpy_struct_decl + + def _generate_cpy_struct_ctx(self, tp, name): + self._struct_ctx(tp, *self._struct_names(tp)) + _generate_cpy_union_ctx = _generate_cpy_struct_ctx + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. + + def _generate_cpy_anonymous_collecttype(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_collecttype(tp, name) + else: + self._struct_collecttype(tp) + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp) + else: + self._struct_decl(tp, name, 'typedef_' + name) + + def _generate_cpy_anonymous_ctx(self, tp, name): + if isinstance(tp, model.EnumType): + self._enum_ctx(tp, name) + else: + self._struct_ctx(tp, name, 'typedef_' + name) + + # ---------- + # constants, declared with "static const ..." 
+ + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + check_value=None): + if (category, name) in self._seen_constants: + raise VerificationError( + "duplicate declaration of %s '%s'" % (category, name)) + self._seen_constants.add((category, name)) + # + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + if is_int: + prnt('static int %s(unsigned long long *o)' % funcname) + prnt('{') + prnt(' int n = (%s) <= 0;' % (name,)) + prnt(' *o = (unsigned long long)((%s) | 0);' + ' /* check that %s is an integer */' % (name, name)) + if check_value is not None: + if check_value > 0: + check_value = '%dU' % (check_value,) + prnt(' if (!_cffi_check_int(*o, n, %s))' % (check_value,)) + prnt(' n |= 2;') + prnt(' return n;') + prnt('}') + else: + assert check_value is None + prnt('static void %s(char *o)' % funcname) + prnt('{') + prnt(' *(%s)o = %s;' % (tp.get_c_name('*'), name)) + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = tp.is_integer_type() + if not is_int or self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + def _generate_cpy_constant_ctx(self, tp, name): + if not self.target_is_python and tp.is_integer_type(): + type_op = CffiOp(OP_CONSTANT_INT, -1) + else: + if self.target_is_python: + const_kind = OP_DLOPEN_CONST + else: + const_kind = OP_CONSTANT + type_index = self._typesdict[tp] + type_op = CffiOp(const_kind, type_index) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op)) + + # ---------- + # enums + + def _generate_cpy_enum_collecttype(self, tp, name): + self._do_collect_type(tp) + + def _generate_cpy_enum_decl(self, tp, name=None): + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator) + + def _enum_ctx(self, tp, cname): + type_index = self._typesdict[tp] + type_op = CffiOp(OP_ENUM, -1) + if self.target_is_python: + tp.check_not_partial() + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._lsts["global"].append( + GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op, + check_value=enumvalue)) + # + if cname is not None and '$' not in cname and not self.target_is_python: + size = "sizeof(%s)" % cname + signed = "((%s)-1) <= 0" % cname + else: + basetp = tp.build_baseinttype(self.ffi, []) + size = self.ffi.sizeof(basetp) + signed = int(int(self.ffi.cast(basetp, -1)) < 0) + allenums = ",".join(tp.enumerators) + self._lsts["enum"].append( + EnumExpr(tp.name, type_index, size, signed, allenums)) + + def _generate_cpy_enum_ctx(self, tp, name): + self._enum_ctx(tp, tp._get_c_name()) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_collecttype(self, tp, name): + pass + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + def _generate_cpy_macro_ctx(self, tp, name): + if tp == '...': + if self.target_is_python: + raise VerificationError( + "cannot use the syntax '...' in '#define %s ...' 
when " + "using the ABI mode" % (name,)) + check_value = None + else: + check_value = tp # an integer + type_op = CffiOp(OP_CONSTANT_INT, -1) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op, + check_value=check_value)) + + # ---------- + # global variables + + def _global_type(self, tp, global_name): + if isinstance(tp, model.ArrayType): + actual_length = tp.length + if actual_length == '...': + actual_length = '_cffi_array_len(%s)' % (global_name,) + tp_item = self._global_type(tp.item, '%s[0]' % global_name) + tp = model.ArrayType(tp_item, actual_length) + return tp + + def _generate_cpy_variable_collecttype(self, tp, name): + self._do_collect_type(self._global_type(tp, name)) + + def _generate_cpy_variable_decl(self, tp, name): + prnt = self._prnt + tp = self._global_type(tp, name) + if isinstance(tp, model.ArrayType) and tp.length is None: + tp = tp.item + ampersand = '' + else: + ampersand = '&' + # This code assumes that casts from "tp *" to "void *" is a + # no-op, i.e. a function that returns a "tp *" can be called + # as if it returned a "void *". This should be generally true + # on any modern machine. The only exception to that rule (on + # uncommon architectures, and as far as I can tell) might be + # if 'tp' were a function type, but that is not possible here. + # (If 'tp' is a function _pointer_ type, then casts from "fn_t + # **" to "void *" are again no-ops, as far as I can tell.) + decl = '*_cffi_var_%s(void)' % (name,) + prnt('static ' + tp.get_c_name(decl, quals=self._current_quals)) + prnt('{') + prnt(' return %s(%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_cpy_variable_ctx(self, tp, name): + tp = self._global_type(tp, name) + type_index = self._typesdict[tp] + if self.target_is_python: + op = OP_GLOBAL_VAR + else: + op = OP_GLOBAL_VAR_F + self._lsts["global"].append( + GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index))) + + # ---------- + # extern "Python" + + def _generate_cpy_extern_python_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + self._do_collect_type(tp) + _generate_cpy_dllexport_python_collecttype = \ + _generate_cpy_extern_python_plus_c_collecttype = \ + _generate_cpy_extern_python_collecttype + + def _extern_python_decl(self, tp, name, tag_and_space): + prnt = self._prnt + if isinstance(tp.result, model.VoidType): + size_of_result = '0' + else: + context = 'result of %s' % name + size_of_result = '(int)sizeof(%s)' % ( + tp.result.get_c_name('', context),) + prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name) + prnt(' { "%s.%s", %s, 0, 0 };' % ( + self.module_name, name, size_of_result)) + prnt() + # + arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' a%d' % i, context) + arguments.append(arg) + # + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments + # + def may_need_128_bits(tp): + return (isinstance(tp, model.PrimitiveType) and + tp.name == 'long double') + # + size_of_a = max(len(tp.args)*8, 8) + if may_need_128_bits(tp.result): + size_of_a = max(size_of_a, 16) + if isinstance(tp.result, model.StructOrUnion): + size_of_a = 'sizeof(%s) > %d ? 
sizeof(%s) : %d' % ( + tp.result.get_c_name(''), size_of_a, + tp.result.get_c_name(''), size_of_a) + prnt('%s%s' % (tag_and_space, tp.result.get_c_name(name_and_arguments))) + prnt('{') + prnt(' char a[%s];' % size_of_a) + prnt(' char *p = a;') + for i, type in enumerate(tp.args): + arg = 'a%d' % i + if (isinstance(type, model.StructOrUnion) or + may_need_128_bits(type)): + arg = '&' + arg + type = model.PointerType(type) + prnt(' *(%s)(p + %d) = %s;' % (type.get_c_name('*'), i*8, arg)) + prnt(' _cffi_call_python(&_cffi_externpy__%s, p);' % name) + if not isinstance(tp.result, model.VoidType): + prnt(' return *(%s)p;' % (tp.result.get_c_name('*'),)) + prnt('}') + prnt() + self._num_externpy += 1 + + def _generate_cpy_extern_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'static ') + + def _generate_cpy_dllexport_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'CFFI_DLLEXPORT ') + + def _generate_cpy_extern_python_plus_c_decl(self, tp, name): + self._extern_python_decl(tp, name, '') + + def _generate_cpy_extern_python_ctx(self, tp, name): + if self.target_is_python: + raise VerificationError( + "cannot use 'extern \"Python\"' in the ABI mode") + if tp.ellipsis: + raise NotImplementedError("a vararg function is extern \"Python\"") + type_index = self._typesdict[tp] + type_op = CffiOp(OP_EXTERN_PYTHON, type_index) + self._lsts["global"].append( + GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name)) + + _generate_cpy_dllexport_python_ctx = \ + _generate_cpy_extern_python_plus_c_ctx = \ + _generate_cpy_extern_python_ctx + + def _print_string_literal_in_array(self, s): + prnt = self._prnt + prnt('// # NB. this is not a string because of a size limit in MSVC') + if not isinstance(s, bytes): # unicode + s = s.encode('utf-8') # -> bytes + else: + s.decode('utf-8') # got bytes, check for valid utf-8 + try: + s.decode('ascii') + except UnicodeDecodeError: + s = b'# -*- encoding: utf8 -*-\n' + s + for line in s.splitlines(True): + comment = line + if type('//') is bytes: # python2 + line = map(ord, line) # make a list of integers + else: # python3 + # type(line) is bytes, which enumerates like a list of integers + comment = ascii(comment)[1:-1] + prnt(('// ' + comment).rstrip()) + printed_line = '' + for c in line: + if len(printed_line) >= 76: + prnt(printed_line) + printed_line = '' + printed_line += '%d,' % (c,) + prnt(printed_line) + + # ---------- + # emitting the opcodes for individual types + + def _emit_bytecode_VoidType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID) + + def _emit_bytecode_PrimitiveType(self, tp, index): + prim_index = PRIMITIVE_TO_INDEX[tp.name] + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index) + + def _emit_bytecode_UnknownIntegerType(self, tp, index): + s = ('_cffi_prim_int(sizeof(%s), (\n' + ' ((%s)-1) | 0 /* check that %s is an integer type */\n' + ' ) <= 0)' % (tp.name, tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_UnknownFloatType(self, tp, index): + s = ('_cffi_prim_float(sizeof(%s) *\n' + ' (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n' + ' )' % (tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_RawFunctionType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result]) + index += 1 + for tp1 in tp.args: + realindex = self._typesdict[tp1] + if index != realindex: + if isinstance(tp1, model.PrimitiveType): + self._emit_bytecode_PrimitiveType(tp1, index) + 
else: + self.cffi_types[index] = CffiOp(OP_NOOP, realindex) + index += 1 + flags = int(tp.ellipsis) + if tp.abi is not None: + if tp.abi == '__stdcall': + flags |= 2 + else: + raise NotImplementedError("abi=%r" % (tp.abi,)) + self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags) + + def _emit_bytecode_PointerType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype]) + + _emit_bytecode_ConstPointerType = _emit_bytecode_PointerType + _emit_bytecode_NamedPointerType = _emit_bytecode_PointerType + + def _emit_bytecode_FunctionPtrType(self, tp, index): + raw = tp.as_raw_function() + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw]) + + def _emit_bytecode_ArrayType(self, tp, index): + item_index = self._typesdict[tp.item] + if tp.length is None: + self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index) + elif tp.length == '...': + raise VerificationError( + "type %s badly placed: the '...' array length can only be " + "used on global arrays or on fields of structures" % ( + str(tp).replace('/*...*/', '...'),)) + else: + assert self.cffi_types[index + 1] == 'LEN' + self.cffi_types[index] = CffiOp(OP_ARRAY, item_index) + self.cffi_types[index + 1] = CffiOp(None, str(tp.length)) + + def _emit_bytecode_StructType(self, tp, index): + struct_index = self._struct_unions[tp] + self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index) + _emit_bytecode_UnionType = _emit_bytecode_StructType + + def _emit_bytecode_EnumType(self, tp, index): + enum_index = self._enums[tp] + self.cffi_types[index] = CffiOp(OP_ENUM, enum_index) + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + +def _is_file_like(maybefile): + # compare to xml.etree.ElementTree._get_writer + return hasattr(maybefile, 'write') + +def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose): + if verbose: + print("generating %s" % (target_file,)) + recompiler = Recompiler(ffi, module_name, + target_is_python=(preamble is None)) + recompiler.collect_type_table() + recompiler.collect_step_tables() + if _is_file_like(target_file): + recompiler.write_source_to_f(target_file, preamble) + return True + f = NativeIO() + recompiler.write_source_to_f(f, preamble) + output = f.getvalue() + try: + with open(target_file, 'r') as f1: + if f1.read(len(output) + 1) != output: + raise IOError + if verbose: + print("(already up-to-date)") + return False # already up-to-date + except IOError: + tmp_file = '%s.~%d' % (target_file, os.getpid()) + with open(tmp_file, 'w') as f1: + f1.write(output) + try: + os.rename(tmp_file, target_file) + except OSError: + os.unlink(target_file) + os.rename(tmp_file, target_file) + return True + +def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False): + assert preamble is not None + return _make_c_or_py_source(ffi, module_name, preamble, target_c_file, + verbose) + +def make_py_source(ffi, module_name, target_py_file, verbose=False): + return _make_c_or_py_source(ffi, module_name, None, target_py_file, + verbose) + +def _modname_to_file(outputdir, modname, extension): + parts = modname.split('.') + try: + os.makedirs(os.path.join(outputdir, *parts[:-1])) + except OSError: + pass + parts[-1] += extension + return os.path.join(outputdir, *parts), parts + + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. 
Here are some hacks to work +# around that, in the _patch_for_*() functions... + +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! + # FUTURE: this module was removed in setuptools 74; this is likely dead code and should be removed, + # since the toolchain it supports (VS2005-2008) is also long dead. + from cffi._shimmed_dist_utils import MSVCCompiler + if MSVCCompiler is not None: + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from cffi._shimmed_dist_utils import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from cffi._shimmed_dist_utils import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + +def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, + c_file=None, source_extension='.c', extradir=None, + compiler_verbose=1, target=None, debug=None, + uses_ffiplatform=True, **kwds): + if not isinstance(module_name, str): + module_name = module_name.encode('ascii') + if ffi._windows_unicode: + ffi._apply_windows_unicode(kwds) + if preamble is not None: + if call_c_compiler and _is_file_like(c_file): + raise TypeError("Writing to file-like objects is not supported " + "with call_c_compiler=True") + embedding = (ffi._embedding is not None) + if embedding: + ffi._apply_embedding_fix(kwds) + if c_file is None: + c_file, parts = _modname_to_file(tmpdir, module_name, + source_extension) + if extradir: + parts = [extradir] + parts + ext_c_file = os.path.join(*parts) + else: + ext_c_file = c_file + # + if target is None: + if embedding: + target = '%s.*' % module_name + else: + target = '*' + # + if uses_ffiplatform: + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) + else: + ext = None + updated = make_c_source(ffi, module_name, preamble, c_file, + verbose=compiler_verbose) + if call_c_compiler: + patchlist = [] + cwd = os.getcwd() + try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) + if compiler_verbose: + if tmpdir == '.': + msg = 'the current directory is' + else: + msg = 'setting the current directory to' + print('%s %r' % (msg, os.path.abspath(tmpdir))) + os.chdir(tmpdir) + outputfilename = ffiplatform.compile('.', ext, + compiler_verbose, debug) + finally: + os.chdir(cwd) + _unpatch_meths(patchlist) + return outputfilename + else: + 
return ext, updated + else: + if c_file is None: + c_file, _ = _modname_to_file(tmpdir, module_name, '.py') + updated = make_py_source(ffi, module_name, c_file, + verbose=compiler_verbose) + if call_c_compiler: + return c_file + else: + return None, updated + diff --git a/venv/lib/python3.12/site-packages/cffi/setuptools_ext.py b/venv/lib/python3.12/site-packages/cffi/setuptools_ext.py new file mode 100644 index 00000000..5cdd246f --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/setuptools_ext.py @@ -0,0 +1,229 @@ +import os +import sys +import sysconfig + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +def error(msg): + from cffi._shimmed_dist_utils import DistutilsSetupError + raise DistutilsSetupError(msg) + + +def execfile(filename, glob): + # We use execfile() (here rewritten for Python 3) instead of + # __import__() to load the build script. The problem with + # a normal import is that in some packages, the intermediate + # __init__.py files may already try to import the file that + # we are generating. + with open(filename) as f: + src = f.read() + src += '\n' # Python 2.6 compatibility + code = compile(src, filename, 'exec') + exec(code, glob, glob) + + +def add_cffi_module(dist, mod_spec): + from cffi.api import FFI + + if not isinstance(mod_spec, basestring): + error("argument to 'cffi_modules=...' must be a str or a list of str," + " not %r" % (type(mod_spec).__name__,)) + mod_spec = str(mod_spec) + try: + build_file_name, ffi_var_name = mod_spec.split(':') + except ValueError: + error("%r must be of the form 'path/build.py:ffi_variable'" % + (mod_spec,)) + if not os.path.exists(build_file_name): + ext = '' + rewritten = build_file_name.replace('.', '/') + '.py' + if os.path.exists(rewritten): + ext = ' (rewrite cffi_modules to [%r])' % ( + rewritten + ':' + ffi_var_name,) + error("%r does not name an existing file%s" % (build_file_name, ext)) + + mod_vars = {'__name__': '__cffi__', '__file__': build_file_name} + execfile(build_file_name, mod_vars) + + try: + ffi = mod_vars[ffi_var_name] + except KeyError: + error("%r: object %r not found in module" % (mod_spec, + ffi_var_name)) + if not isinstance(ffi, FFI): + ffi = ffi() # maybe it's a function instead of directly an ffi + if not isinstance(ffi, FFI): + error("%r is not an FFI instance (got %r)" % (mod_spec, + type(ffi).__name__)) + if not hasattr(ffi, '_assigned_source'): + error("%r: the set_source() method was not called" % (mod_spec,)) + module_name, source, source_extension, kwds = ffi._assigned_source + if ffi._windows_unicode: + kwds = kwds.copy() + ffi._apply_windows_unicode(kwds) + + if source is None: + _add_py_module(dist, ffi, module_name) + else: + _add_c_module(dist, ffi, module_name, source, source_extension, kwds) + +def _set_py_limited_api(Extension, kwds): + """ + Add py_limited_api to kwds if setuptools >= 26 is in use. + Do not alter the setting if it already exists. + Setuptools takes care of ignoring the flag on Python 2 and PyPy. + + CPython itself should ignore the flag in a debugging version + (by not listing .abi3.so in the extensions it supports), but + it doesn't so far, creating troubles. That's why we check + for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent + of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401) + + On Windows, with CPython <= 3.4, it's better not to use py_limited_api + because virtualenv *still* doesn't copy PYTHON3.DLL on these versions. + Recently (2020) we started shipping only >= 3.5 wheels, though. 
So + we'll give it another try and set py_limited_api on Windows >= 3.5. + """ + from cffi._shimmed_dist_utils import log + from cffi import recompiler + + if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount') + and recompiler.USE_LIMITED_API): + import setuptools + try: + setuptools_major_version = int(setuptools.__version__.partition('.')[0]) + if setuptools_major_version >= 26: + kwds['py_limited_api'] = True + except ValueError: # certain development versions of setuptools + # If we don't know the version number of setuptools, we + # try to set 'py_limited_api' anyway. At worst, we get a + # warning. + kwds['py_limited_api'] = True + + if sysconfig.get_config_var("Py_GIL_DISABLED"): + if kwds.get('py_limited_api'): + log.info("Ignoring py_limited_api=True for free-threaded build.") + + kwds['py_limited_api'] = False + + if kwds.get('py_limited_api') is False: + # avoid setting Py_LIMITED_API if py_limited_api=False + # which _cffi_include.h does unless _CFFI_NO_LIMITED_API is defined + kwds.setdefault("define_macros", []).append(("_CFFI_NO_LIMITED_API", None)) + return kwds + +def _add_c_module(dist, ffi, module_name, source, source_extension, kwds): + # We are a setuptools extension. Need this build_ext for py_limited_api. + from setuptools.command.build_ext import build_ext + from cffi._shimmed_dist_utils import Extension, log, mkpath + from cffi import recompiler + + allsources = ['$PLACEHOLDER'] + allsources.extend(kwds.pop('sources', [])) + kwds = _set_py_limited_api(Extension, kwds) + ext = Extension(name=module_name, sources=allsources, **kwds) + + def make_mod(tmpdir, pre_run=None): + c_file = os.path.join(tmpdir, module_name + source_extension) + log.info("generating cffi module %r" % c_file) + mkpath(tmpdir) + # a setuptools-only, API-only hook: called with the "ext" and "ffi" + # arguments just before we turn the ffi into C code. To use it, + # subclass the 'distutils.command.build_ext.build_ext' class and + # add a method 'def pre_run(self, ext, ffi)'. + if pre_run is not None: + pre_run(ext, ffi) + updated = recompiler.make_c_source(ffi, module_name, source, c_file) + if not updated: + log.info("already up-to-date") + return c_file + + if dist.ext_modules is None: + dist.ext_modules = [] + dist.ext_modules.append(ext) + + base_class = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class): + def run(self): + if ext.sources[0] == '$PLACEHOLDER': + pre_run = getattr(self, 'pre_run', None) + ext.sources[0] = make_mod(self.build_temp, pre_run) + base_class.run(self) + dist.cmdclass['build_ext'] = build_ext_make_mod + # NB. multiple runs here will create multiple 'build_ext_make_mod' + # classes. Even in this case the 'build_ext' command should be + # run once; but just in case, the logic above does nothing if + # called again. 
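Seen from a project that consumes these hooks: a cffi_modules entry in setup.py names a build script and the FFI variable inside it; add_cffi_module() above loads that script with execfile(), checks that set_source() was called, and installs the build_ext_make_mod command so the C source is generated and compiled during build_ext. A minimal sketch follows (package, module, and function names are invented; this mirrors the documented cffi_modules usage, not code from this patch):

    # pkg/_example_build.py -- build script referenced by cffi_modules
    import cffi

    ffibuilder = cffi.FFI()
    ffibuilder.cdef("double mysum(double *, int);")
    ffibuilder.set_source("pkg._example", """
    static double mysum(double *p, int n) {
        double total = 0;
        while (n--)
            total += *p++;
        return total;
    }
    """)

    if __name__ == "__main__":
        ffibuilder.compile(verbose=True)   # direct invocation, outside setuptools

    # setup.py -- the 'path:variable' string is parsed by add_cffi_module()
    from setuptools import setup

    setup(
        name="pkg",
        packages=["pkg"],
        setup_requires=["cffi"],
        cffi_modules=["pkg/_example_build.py:ffibuilder"],
        install_requires=["cffi"],
    )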
+ + +def _add_py_module(dist, ffi, module_name): + from setuptools.command.build_py import build_py + from setuptools.command.build_ext import build_ext + from cffi._shimmed_dist_utils import log, mkpath + from cffi import recompiler + + def generate_mod(py_file): + log.info("generating cffi module %r" % py_file) + mkpath(os.path.dirname(py_file)) + updated = recompiler.make_py_source(ffi, module_name, py_file) + if not updated: + log.info("already up-to-date") + + base_class = dist.cmdclass.get('build_py', build_py) + class build_py_make_mod(base_class): + def run(self): + base_class.run(self) + module_path = module_name.split('.') + module_path[-1] += '.py' + generate_mod(os.path.join(self.build_lib, *module_path)) + def get_source_files(self): + # This is called from 'setup.py sdist' only. Exclude + # the generate .py module in this case. + saved_py_modules = self.py_modules + try: + if saved_py_modules: + self.py_modules = [m for m in saved_py_modules + if m != module_name] + return base_class.get_source_files(self) + finally: + self.py_modules = saved_py_modules + dist.cmdclass['build_py'] = build_py_make_mod + + # distutils and setuptools have no notion I could find of a + # generated python module. If we don't add module_name to + # dist.py_modules, then things mostly work but there are some + # combination of options (--root and --record) that will miss + # the module. So we add it here, which gives a few apparently + # harmless warnings about not finding the file outside the + # build directory. + # Then we need to hack more in get_source_files(); see above. + if dist.py_modules is None: + dist.py_modules = [] + dist.py_modules.append(module_name) + + # the following is only for "build_ext -i" + base_class_2 = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class_2): + def run(self): + base_class_2.run(self) + if self.inplace: + # from get_ext_fullpath() in distutils/command/build_ext.py + module_path = module_name.split('.') + package = '.'.join(module_path[:-1]) + build_py = self.get_finalized_command('build_py') + package_dir = build_py.get_package_dir(package) + file_name = module_path[-1] + '.py' + generate_mod(os.path.join(package_dir, file_name)) + dist.cmdclass['build_ext'] = build_ext_make_mod + +def cffi_modules(dist, attr, value): + assert attr == 'cffi_modules' + if isinstance(value, basestring): + value = [value] + + for cffi_module in value: + add_cffi_module(dist, cffi_module) diff --git a/venv/lib/python3.12/site-packages/cffi/vengine_cpy.py b/venv/lib/python3.12/site-packages/cffi/vengine_cpy.py new file mode 100644 index 00000000..02e6a471 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/vengine_cpy.py @@ -0,0 +1,1087 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys +from . import model +from .error import VerificationError +from . import _imp_emulation as imp + + +class VCPythonEngine(object): + _class_key = 'x' + _gen_python_module = True + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self._struct_pending_verification = {} + self._types_of_builtin_functions = {} + + def patch_extension_kwds(self, kwds): + pass + + def find_module(self, module_name, path, so_suffixes): + try: + f, filename, descr = imp.find_module(module_name, path) + except ImportError: + return None + if f is not None: + f.close() + # Note that after a setuptools installation, there are both .py + # and .so files with the same basename. 
The code here relies on
+        # imp.find_module() locating the .so in priority.
+        if descr[0] not in so_suffixes:
+            return None
+        return filename
+
+    def collect_types(self):
+        self._typesdict = {}
+        self._generate("collecttype")
+
+    def _prnt(self, what=''):
+        self._f.write(what + '\n')
+
+    def _gettypenum(self, type):
+        # a KeyError here is a bug.  please report it! :-)
+        return self._typesdict[type]
+
+    def _do_collect_type(self, tp):
+        if ((not isinstance(tp, model.PrimitiveType)
+             or tp.name == 'long double')
+                and tp not in self._typesdict):
+            num = len(self._typesdict)
+            self._typesdict[tp] = num
+
+    def write_source_to_f(self):
+        self.collect_types()
+        #
+        # The new module will have a _cffi_setup() function that receives
+        # objects from the ffi world, and that calls some setup code in
+        # the module.  This setup code is split in several independent
+        # functions, e.g. one per constant.  The functions are "chained"
+        # by ending in a tail call to each other.
+        #
+        # This is further split in two chained lists, depending on if we
+        # can do it at import-time or if we must wait for _cffi_setup() to
+        # provide us with the <ctype> objects.  This is needed because we
+        # need the values of the enum constants in order to build the
+        # <ctype 'enum'> that we may have to pass to _cffi_setup().
+        #
+        # The following two 'chained_list_constants' items contain
+        # the head of these two chained lists, as a string that gives the
+        # call to do, if any.
+        self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)']
+        #
+        prnt = self._prnt
+        # first paste some standard set of lines that are mostly '#define'
+        prnt(cffimod_header)
+        prnt()
+        # then paste the C source given by the user, verbatim.
+        prnt(self.verifier.preamble)
+        prnt()
+        #
+        # call generate_cpy_xxx_decl(), for every xxx found from
+        # ffi._parser._declarations.  This generates all the functions.
+        self._generate("decl")
+        #
+        # implement the function _cffi_setup_custom() as calling the
+        # head of the chained list.
+        self._generate_setup_custom()
+        prnt()
+        #
+        # produce the method table, including the entries for the
+        # generated Python->C function wrappers, which are done
+        # by generate_cpy_function_method().
+        prnt('static PyMethodDef _cffi_methods[] = {')
+        self._generate("method")
+        prnt('  {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},')
+        prnt('  {NULL, NULL, 0, NULL}    /* Sentinel */')
+        prnt('};')
+        prnt()
+        #
+        # standard init.
+        modname = self.verifier.get_module_name()
+        constants = self._chained_list_constants[False]
+        prnt('#if PY_MAJOR_VERSION >= 3')
+        prnt()
+        prnt('static struct PyModuleDef _cffi_module_def = {')
+        prnt('  PyModuleDef_HEAD_INIT,')
+        prnt('  "%s",' % modname)
+        prnt('  NULL,')
+        prnt('  -1,')
+        prnt('  _cffi_methods,')
+        prnt('  NULL, NULL, NULL, NULL')
+        prnt('};')
+        prnt()
+        prnt('PyMODINIT_FUNC')
+        prnt('PyInit_%s(void)' % modname)
+        prnt('{')
+        prnt('  PyObject *lib;')
+        prnt('  lib = PyModule_Create(&_cffi_module_def);')
+        prnt('  if (lib == NULL)')
+        prnt('    return NULL;')
+        prnt('  if (%s < 0 || _cffi_init() < 0) {' % (constants,))
+        prnt('    Py_DECREF(lib);')
+        prnt('    return NULL;')
+        prnt('  }')
+        prnt('#if Py_GIL_DISABLED')
+        prnt('  PyUnstable_Module_SetGIL(lib, Py_MOD_GIL_NOT_USED);')
+        prnt('#endif')
+        prnt('  return lib;')
+        prnt('}')
+        prnt()
+        prnt('#else')
+        prnt()
+        prnt('PyMODINIT_FUNC')
+        prnt('init%s(void)' % modname)
+        prnt('{')
+        prnt('  PyObject *lib;')
+        prnt('  lib = Py_InitModule("%s", _cffi_methods);' % modname)
+        prnt('  if (lib == NULL)')
+        prnt('    return;')
+        prnt('  if (%s < 0 || _cffi_init() < 0)' % (constants,))
+        prnt('    return;')
+        prnt('  return;')
+        prnt('}')
+        prnt()
+        prnt('#endif')
+
+    def load_library(self, flags=None):
+        # XXX review all usages of 'self' here!
+        # import it as a new extension module
+        imp.acquire_lock()
+        try:
+            if hasattr(sys, "getdlopenflags"):
+                previous_flags = sys.getdlopenflags()
+            try:
+                if hasattr(sys, "setdlopenflags") and flags is not None:
+                    sys.setdlopenflags(flags)
+                module = imp.load_dynamic(self.verifier.get_module_name(),
+                                          self.verifier.modulefilename)
+            except ImportError as e:
+                error = "importing %r: %s" % (self.verifier.modulefilename, e)
+                raise VerificationError(error)
+            finally:
+                if hasattr(sys, "setdlopenflags"):
+                    sys.setdlopenflags(previous_flags)
+        finally:
+            imp.release_lock()
+        #
+        # call loading_cpy_struct() to get the struct layout inferred by
+        # the C compiler
+        self._load(module, 'loading')
+        #
+        # the C code will need the <ctype> objects.  Collect them in
+        # order in a list.
+        revmapping = dict([(value, key)
+                           for (key, value) in self._typesdict.items()])
+        lst = [revmapping[i] for i in range(len(revmapping))]
+        lst = list(map(self.ffi._get_cached_btype, lst))
+        #
+        # build the FFILibrary class and instance and call _cffi_setup().
+        # this will set up some fields like '_cffi_types', and only then
+        # it will invoke the chained list of functions that will really
+        # build (notably) the constant objects, as <cdata> if they are
+        # pointers, and store them as attributes on the 'library' object.
+        class FFILibrary(object):
+            _cffi_python_module = module
+            _cffi_ffi = self.ffi
+            _cffi_dir = []
+            def __dir__(self):
+                return FFILibrary._cffi_dir + list(self.__dict__)
+        library = FFILibrary()
+        if module._cffi_setup(lst, VerificationError, library):
+            import warnings
+            warnings.warn("reimporting %r might overwrite older definitions"
+                          % (self.verifier.get_module_name()))
+        #
+        # finally, call the loaded_cpy_xxx() functions.  This will perform
+        # the final adjustments, like copying the Python->C wrapper
+        # functions from the module to the 'library' object, and setting
+        # up the FFILibrary class with properties for the global C variables.
+ self._load(module, 'loaded', library=library) + module._cffi_original_ffi = self.ffi + module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_cpy_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + elif tp.is_complex_type(): + raise VerificationError( + "not implemented in verify(): complex types") + else: + converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''), + tp.name.replace(' ', '_')) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif isinstance(tp, (model.StructOrUnion, model.EnumType)): + # a struct (not a struct pointer) as a function argument + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars, freelines): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + localvars.add('struct _cffi_freeme_s *large_args_free = NULL') + freelines.add('if (large_args_free != NULL)' + ' _cffi_free_array_arguments(large_args_free);') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' %s = ((size_t)datasize) <= 640 ? 
' + 'alloca((size_t)datasize) : NULL;' % (tovar,)) + self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' + '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) + self._prnt(' datasize, &large_args_free) < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif tp.name != 'long double': + return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs: generates no code so far + + _generate_cpy_typedef_collecttype = _generate_nothing + _generate_cpy_typedef_decl = _generate_nothing + _generate_cpy_typedef_method = _generate_nothing + _loading_cpy_typedef = _loaded_noop + _loaded_cpy_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + self._do_collect_type(tp) + else: + # don't call _do_collect_type(tp) in this common case, + # otherwise test_autofilled_struct_as_argument fails + for type in tp.args: + self._do_collect_type(type) + self._do_collect_type(tp.result) + + def _generate_cpy_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + prnt(' %s;' % type.get_c_name(' x%d' % i, context)) + # + localvars = set() + freelines = set() + for type in tp.args: + self._extra_local_variables(type, localvars, freelines) + for decl in sorted(localvars): + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + prnt(' %s;' % tp.result.get_c_name(' result', context)) + prnt(' PyObject *pyresult;') + else: + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_ParseTuple(args, "%s:%s", %s))' % ( + 'O' * numargs, name, ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type 
in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + prnt(' { %s%s(%s); }' % ( + result_code, name, + ', '.join(['x%d' % i for i in range(len(tp.args))]))) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' pyresult = %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + for freeline in freelines: + prnt(' ' + freeline) + prnt(' return pyresult;') + else: + for freeline in freelines: + prnt(' ' + freeline) + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + prnt() + + def _generate_cpy_function_method(self, tp, name): + if tp.ellipsis: + return + numargs = len(tp.args) + if numargs == 0: + meth = 'METH_NOARGS' + elif numargs == 1: + meth = 'METH_O' + else: + meth = 'METH_VARARGS' + self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth)) + + _loading_cpy_function = _loaded_noop + + def _loaded_cpy_function(self, tp, name, module, library): + if tp.ellipsis: + return + func = getattr(module, name) + setattr(library, name, func) + self._types_of_builtin_functions[func] = tp + + # ---------- + # named structs + + _generate_cpy_struct_collecttype = _generate_nothing + def _generate_cpy_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + def _generate_cpy_struct_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'struct', name) + def _loading_cpy_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + def _loaded_cpy_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + _generate_cpy_union_collecttype = _generate_nothing + def _generate_cpy_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + def _generate_cpy_union_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'union', name) + def _loading_cpy_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + def _loaded_cpy_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. 
+ try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('static PyObject *') + prnt('%s(PyObject *self, PyObject *noarg)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static Py_ssize_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' (void)self; /* unused */') + prnt(' (void)noarg; /* unused */') + prnt(' return _cffi_get_struct_layout(nums);') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _generate_struct_or_union_method(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + self._prnt(' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname, + layoutfuncname)) + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + function = getattr(module, layoutfuncname) + layout = function() + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
+ + _generate_cpy_anonymous_collecttype = _generate_nothing + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _generate_cpy_anonymous_method(self, tp, name): + if not isinstance(tp, model.EnumType): + self._generate_struct_or_union_method(tp, '', name) + + def _loading_cpy_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_cpy_enum(tp, name, module) + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_cpy_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_cpy_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + vartp=None, delayed=True, size_too=False, + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + prnt(' PyObject *o;') + prnt(' int res;') + if not is_int: + prnt(' %s;' % (vartp or tp).get_c_name(' i', name)) + else: + assert category == 'const' + # + if check_value is not None: + self._check_int_constant_value(name, check_value) + # + if not is_int: + if category == 'var': + realexpr = '&' + name + else: + realexpr = name + prnt(' i = (%s);' % (realexpr,)) + prnt(' o = %s;' % (self._convert_expr_from_c(tp, 'i', + 'variable type'),)) + assert delayed + else: + prnt(' o = _cffi_from_c_int_const(%s);' % name) + prnt(' if (o == NULL)') + prnt(' return -1;') + if size_too: + prnt(' {') + prnt(' PyObject *o1 = o;') + prnt(' o = Py_BuildValue("On", o1, (Py_ssize_t)sizeof(%s));' + % (name,)) + prnt(' Py_DECREF(o1);') + prnt(' if (o == NULL)') + prnt(' return -1;') + prnt(' }') + prnt(' res = PyObject_SetAttrString(lib, "%s", o);' % name) + prnt(' Py_DECREF(o);') + prnt(' if (res < 0)') + prnt(' return -1;') + prnt(' return %s;' % self._chained_list_constants[delayed]) + self._chained_list_constants[delayed] = funcname + '(lib)' + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + if not is_int: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + _generate_cpy_constant_method = _generate_nothing + _loading_cpy_constant = _loaded_noop + _loaded_cpy_constant = _loaded_noop + + # ---------- + # enums + + def _check_int_constant_value(self, name, value, err_prefix=''): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' % + name) + prnt(' PyErr_Format(_cffi_VerificationError,') + prnt(' "%s%s has the real value %s, not %s",') + prnt(' "%s", "%s", buf, "%d");' % ( + err_prefix, name, value)) + prnt(' return -1;') + prnt(' }') + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def 
_generate_cpy_enum_decl(self, tp, name, prefix='enum'):
+        if tp.partial:
+            for enumerator in tp.enumerators:
+                self._generate_cpy_const(True, enumerator, delayed=False)
+            return
+        #
+        funcname = self._enum_funcname(prefix, name)
+        prnt = self._prnt
+        prnt('static int %s(PyObject *lib)' % funcname)
+        prnt('{')
+        for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
+            self._check_int_constant_value(enumerator, enumvalue,
+                                           "enum %s: " % name)
+        prnt('  return %s;' % self._chained_list_constants[True])
+        self._chained_list_constants[True] = funcname + '(lib)'
+        prnt('}')
+        prnt()
+
+    _generate_cpy_enum_collecttype = _generate_nothing
+    _generate_cpy_enum_method = _generate_nothing
+
+    def _loading_cpy_enum(self, tp, name, module):
+        if tp.partial:
+            enumvalues = [getattr(module, enumerator)
+                          for enumerator in tp.enumerators]
+            tp.enumvalues = tuple(enumvalues)
+            tp.partial_resolved = True
+
+    def _loaded_cpy_enum(self, tp, name, module, library):
+        for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
+            setattr(library, enumerator, enumvalue)
+
+    # ----------
+    # macros: for now only for integers
+
+    def _generate_cpy_macro_decl(self, tp, name):
+        if tp == '...':
+            check_value = None
+        else:
+            check_value = tp     # an integer
+        self._generate_cpy_const(True, name, check_value=check_value)
+
+    _generate_cpy_macro_collecttype = _generate_nothing
+    _generate_cpy_macro_method = _generate_nothing
+    _loading_cpy_macro = _loaded_noop
+    _loaded_cpy_macro = _loaded_noop
+
+    # ----------
+    # global variables
+
+    def _generate_cpy_variable_collecttype(self, tp, name):
+        if isinstance(tp, model.ArrayType):
+            tp_ptr = model.PointerType(tp.item)
+        else:
+            tp_ptr = model.PointerType(tp)
+        self._do_collect_type(tp_ptr)
+
+    def _generate_cpy_variable_decl(self, tp, name):
+        if isinstance(tp, model.ArrayType):
+            tp_ptr = model.PointerType(tp.item)
+            self._generate_cpy_const(False, name, tp, vartp=tp_ptr,
+                                     size_too = tp.length_is_unknown())
+        else:
+            tp_ptr = model.PointerType(tp)
+            self._generate_cpy_const(False, name, tp_ptr, category='var')
+
+    _generate_cpy_variable_method = _generate_nothing
+    _loading_cpy_variable = _loaded_noop
+
+    def _loaded_cpy_variable(self, tp, name, module, library):
+        value = getattr(library, name)
+        if isinstance(tp, model.ArrayType):   # int a[5] is "constant" in the
+                                              # sense that "a=..." is forbidden
+            if tp.length_is_unknown():
+                assert isinstance(value, tuple)
+                (value, size) = value
+                BItemType = self.ffi._get_cached_btype(tp.item)
+                length, rest = divmod(size, self.ffi.sizeof(BItemType))
+                if rest != 0:
+                    raise VerificationError(
+                        "bad size: %r does not seem to be an array of %s" %
+                        (name, tp.item))
+                tp = tp.resolve_length(length)
+            # 'value' is a <cdata 'type (*)[N]'> which we have to replace with
+            # a <cdata 'type[N]'> if the N is actually known
+            if tp.length is not None:
+                BArray = self.ffi._get_cached_btype(tp)
+                value = self.ffi.cast(BArray, value)
+                setattr(library, name, value)
+            return
+        # remove ptr=<cdata 'int *'> from the library instance, and replace
+        # it by a property on the class, which reads/writes into ptr[0].
+
+        ptr = value
+        delattr(library, name)
+        def getter(library):
+            return ptr[0]
+        def setter(library, value):
+            ptr[0] = value
+        setattr(type(library), name, property(getter, setter))
+        type(library)._cffi_dir.append(name)
+
+    # ----------
+
+    def _generate_setup_custom(self):
+        prnt = self._prnt
+        prnt('static int _cffi_setup_custom(PyObject *lib)')
+        prnt('{')
+        prnt('  return %s;' % self._chained_list_constants[True])
+        prnt('}')
+
+cffimod_header = r'''
+#include <Python.h>
+#include <stddef.h>
+
+/* this block of #ifs should be kept exactly identical between
+   c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
+   and cffi/_cffi_include.h */
+#if defined(_MSC_VER)
+# include <malloc.h>   /* for alloca() */
+# if _MSC_VER < 1600   /* MSVC < 2010 */
+   typedef __int8 int8_t;
+   typedef __int16 int16_t;
+   typedef __int32 int32_t;
+   typedef __int64 int64_t;
+   typedef unsigned __int8 uint8_t;
+   typedef unsigned __int16 uint16_t;
+   typedef unsigned __int32 uint32_t;
+   typedef unsigned __int64 uint64_t;
+   typedef __int8 int_least8_t;
+   typedef __int16 int_least16_t;
+   typedef __int32 int_least32_t;
+   typedef __int64 int_least64_t;
+   typedef unsigned __int8 uint_least8_t;
+   typedef unsigned __int16 uint_least16_t;
+   typedef unsigned __int32 uint_least32_t;
+   typedef unsigned __int64 uint_least64_t;
+   typedef __int8 int_fast8_t;
+   typedef __int16 int_fast16_t;
+   typedef __int32 int_fast32_t;
+   typedef __int64 int_fast64_t;
+   typedef unsigned __int8 uint_fast8_t;
+   typedef unsigned __int16 uint_fast16_t;
+   typedef unsigned __int32 uint_fast32_t;
+   typedef unsigned __int64 uint_fast64_t;
+   typedef __int64 intmax_t;
+   typedef unsigned __int64 uintmax_t;
+# else
+#  include <stdint.h>
+# endif
+# if _MSC_VER < 1800   /* MSVC < 2013 */
+#  ifndef __cplusplus
+    typedef unsigned char _Bool;
+#  endif
+# endif
+# define _cffi_float_complex_t   _Fcomplex    /* include <complex.h> for it */
+# define _cffi_double_complex_t  _Dcomplex    /* include <complex.h> for it */
+#else
+# include <stdint.h>
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
+#  include <alloca.h>
+# endif
+# define _cffi_float_complex_t   float _Complex
+# define _cffi_double_complex_t  double _Complex
+#endif
+
+#if PY_MAJOR_VERSION < 3
+# undef PyCapsule_CheckExact
+# undef PyCapsule_GetPointer
+# define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule))
+# define PyCapsule_GetPointer(capsule, name) \
+    (PyCObject_AsVoidPtr(capsule))
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+# define PyInt_FromLong PyLong_FromLong
+#endif
+
+#define _cffi_from_c_double PyFloat_FromDouble
+#define _cffi_from_c_float PyFloat_FromDouble
+#define _cffi_from_c_long PyInt_FromLong
+#define _cffi_from_c_ulong PyLong_FromUnsignedLong
+#define _cffi_from_c_longlong PyLong_FromLongLong
+#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
+#define _cffi_from_c__Bool PyBool_FromLong
+
+#define _cffi_to_c_double PyFloat_AsDouble
+#define _cffi_to_c_float PyFloat_AsDouble
+
+#define _cffi_from_c_int_const(x)                                        \
+    (((x) > 0) ?                                                         \
+        ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ?      \
+            PyInt_FromLong((long)(x)) :                                  \
+            PyLong_FromUnsignedLongLong((unsigned long long)(x)) :       \
+        ((long long)(x) >= (long long)LONG_MIN) ?                        \
+            PyInt_FromLong((long)(x)) :                                  \
+            PyLong_FromLongLong((long long)(x)))
+
+#define _cffi_from_c_int(x, type)                                        \
+    (((type)-1) > 0 ? /* unsigned */                                     \
+        (sizeof(type) < sizeof(long) ?                                   \
+            PyInt_FromLong((long)x) :                                    \
+         sizeof(type) == sizeof(long) ?                                  \
+            PyLong_FromUnsignedLong((unsigned long)x) :                  \
+            PyLong_FromUnsignedLongLong((unsigned long long)x)) :        \
+        (sizeof(type) <= sizeof(long) ?
\ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + ((PyObject *(*)(Py_ssize_t[]))_cffi_exports[12]) +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) +#define _CFFI_NUM_EXPORTS 25 + +typedef struct _ctypedescr CTypeDescrObject; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; +static PyObject *_cffi_types, *_cffi_VerificationError; + +static int _cffi_setup_custom(PyObject *lib); /* forward */ + +static PyObject *_cffi_setup(PyObject *self, PyObject *args) +{ + PyObject *library; + int was_alive = (_cffi_types != NULL); + (void)self; /* unused */ + if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError, + &library)) + return NULL; + Py_INCREF(_cffi_types); + Py_INCREF(_cffi_VerificationError); + if (_cffi_setup_custom(library) < 0) + return NULL; + return PyBool_FromLong(was_alive); +} + +union _cffi_union_alignment_u { + unsigned char m_char; + unsigned short m_short; + unsigned int m_int; + unsigned long m_long; + unsigned long long m_longlong; + float m_float; + double m_double; + long double m_longdouble; +}; + +struct _cffi_freeme_s { + struct _cffi_freeme_s *next; + union _cffi_union_alignment_u alignment; +}; + +#ifdef __GNUC__ + 
__attribute__((unused)) +#endif +static int _cffi_convert_array_argument(CTypeDescrObject *ctptr, PyObject *arg, + char **output_data, Py_ssize_t datasize, + struct _cffi_freeme_s **freeme) +{ + char *p; + if (datasize < 0) + return -1; + + p = *output_data; + if (p == NULL) { + struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( + offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); + if (fp == NULL) + return -1; + fp->next = *freeme; + *freeme = fp; + p = *output_data = (char *)&fp->alignment; + } + memset((void *)p, 0, (size_t)datasize); + return _cffi_convert_array_from_object(p, ctptr, arg); +} + +#ifdef __GNUC__ + __attribute__((unused)) +#endif +static void _cffi_free_array_arguments(struct _cffi_freeme_s *freeme) +{ + do { + void *p = (void *)freeme; + freeme = freeme->next; + PyObject_Free(p); + } while (freeme != NULL); +} + +static int _cffi_init(void) +{ + PyObject *module, *c_api_object = NULL; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + c_api_object = PyObject_GetAttrString(module, "_C_API"); + if (c_api_object == NULL) + goto failure; + if (!PyCapsule_CheckExact(c_api_object)) { + PyErr_SetNone(PyExc_ImportError); + goto failure; + } + memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"), + _CFFI_NUM_EXPORTS * sizeof(void *)); + + Py_DECREF(module); + Py_DECREF(c_api_object); + return 0; + + failure: + Py_XDECREF(module); + Py_XDECREF(c_api_object); + return -1; +} + +#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num)) + +/**********/ +''' diff --git a/venv/lib/python3.12/site-packages/cffi/vengine_gen.py b/venv/lib/python3.12/site-packages/cffi/vengine_gen.py new file mode 100644 index 00000000..bffc8212 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/vengine_gen.py @@ -0,0 +1,679 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os +import types + +from . import model +from .error import VerificationError + + +class VGenericEngine(object): + _class_key = 'g' + _gen_python_module = False + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self.export_symbols = [] + self._struct_pending_verification = {} + + def patch_extension_kwds(self, kwds): + # add 'export_symbols' to the dictionary. Note that we add the + # list before filling it. When we fill it, it will thus also show + # up in kwds['export_symbols']. + kwds.setdefault('export_symbols', self.export_symbols) + + def find_module(self, module_name, path, so_suffixes): + for so_suffix in so_suffixes: + basename = module_name + so_suffix + if path is None: + path = sys.path + for dirname in path: + filename = os.path.join(dirname, basename) + if os.path.isfile(filename): + return filename + + def collect_types(self): + pass # not needed in the generic engine + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self): + prnt = self._prnt + # first paste some standard set of lines that are mostly '#include' + prnt(cffimod_header) + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + # + # call generate_gen_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. 
+ self._generate('decl') + # + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + if sys.platform == 'win32': + if sys.version_info >= (3,): + prefix = 'PyInit_' + else: + prefix = 'init' + modname = self.verifier.get_module_name() + prnt("void %s%s(void) { }\n" % (prefix, modname)) + + def load_library(self, flags=0): + # import it with the CFFI backend + backend = self.ffi._backend + # needs to make a path that contains '/', on Posix + filename = os.path.join(os.curdir, self.verifier.modulefilename) + module = backend.load_library(filename, flags) + # + # call loading_gen_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + + # build the FFILibrary class and instance, this is a module subclass + # because modules are expected to have usually-constant-attributes and + # in PyPy this means the JIT is able to treat attributes as constant, + # which we want. + class FFILibrary(types.ModuleType): + _cffi_generic_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + library = FFILibrary("") + # + # finally, call the loaded_gen_xxx() functions. This will set + # up the 'library' object. + self._load(module, 'loaded', library=library) + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_gen_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_gen_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + # typedefs: generates no code so far + + _generate_gen_typedef_decl = _generate_nothing + _loading_gen_typedef = _loaded_noop + _loaded_gen_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_gen_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no _cffi_f_%s wrapper) + self._generate_gen_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + argnames = [] + for i, type in enumerate(tp.args): + indirection = '' + if isinstance(type, model.StructOrUnion): + indirection = '*' + argnames.append('%sx%d' % (indirection, i)) + context = 'argument of %s' % name + arglist = [type.get_c_name(' %s' % arg, context) + for type, arg in zip(tp.args, argnames)] + tpresult = tp.result + if isinstance(tpresult, model.StructOrUnion): + arglist.insert(0, tpresult.get_c_name(' *r', context)) + tpresult = model.void_type + arglist = ', '.join(arglist) or 'void' + wrappername = '_cffi_f_%s' % name + self.export_symbols.append(wrappername) + if tp.abi: + abi = 
tp.abi + ' ' + else: + abi = '' + funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist) + context = 'result of %s' % name + prnt(tpresult.get_c_name(funcdecl, context)) + prnt('{') + # + if isinstance(tp.result, model.StructOrUnion): + result_code = '*r = ' + elif not isinstance(tp.result, model.VoidType): + result_code = 'return ' + else: + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames))) + prnt('}') + prnt() + + _loading_gen_function = _loaded_noop + + def _loaded_gen_function(self, tp, name, module, library): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + newfunction = self._load_constant(False, tp, name, module) + else: + indirections = [] + base_tp = tp + if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args) + or isinstance(tp.result, model.StructOrUnion)): + indirect_args = [] + for i, typ in enumerate(tp.args): + if isinstance(typ, model.StructOrUnion): + typ = model.PointerType(typ) + indirections.append((i, typ)) + indirect_args.append(typ) + indirect_result = tp.result + if isinstance(indirect_result, model.StructOrUnion): + if indirect_result.fldtypes is None: + raise TypeError("'%s' is used as result type, " + "but is opaque" % ( + indirect_result._get_c_name(),)) + indirect_result = model.PointerType(indirect_result) + indirect_args.insert(0, indirect_result) + indirections.insert(0, ("result", indirect_result)) + indirect_result = model.void_type + tp = model.FunctionPtrType(tuple(indirect_args), + indirect_result, tp.ellipsis) + BFunc = self.ffi._get_cached_btype(tp) + wrappername = '_cffi_f_%s' % name + newfunction = module.load_function(BFunc, wrappername) + for i, typ in indirections: + newfunction = self._make_struct_wrapper(newfunction, i, typ, + base_tp) + setattr(library, name, newfunction) + type(library)._cffi_dir.append(name) + + def _make_struct_wrapper(self, oldfunc, i, tp, base_tp): + backend = self.ffi._backend + BType = self.ffi._get_cached_btype(tp) + if i == "result": + ffi = self.ffi + def newfunc(*args): + res = ffi.new(BType) + oldfunc(res, *args) + return res[0] + else: + def newfunc(*args): + args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:] + return oldfunc(*args) + newfunc._cffi_base_type = base_tp + return newfunc + + # ---------- + # named structs + + def _generate_gen_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + + def _loading_gen_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + + def _loaded_gen_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_gen_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + + def _loading_gen_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + + def _loaded_gen_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + 
and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. + try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + self.export_symbols.append(layoutfuncname) + prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static intptr_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' return nums[i];') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0] + function = module.load_function(BFunc, layoutfuncname) + layout = [] + num = 0 + while True: + x = function(num) + if x < 0: break + layout.append(x) + num += 1 + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
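+    # Illustrative sketch, not part of cffi: the generated _cffi_layout_*
+    # function consumed by _loading_struct_or_union() above returns a flat
+    # array -- index 0 is sizeof(struct), index 1 its alignment, then one
+    # (offset, size) pair per non-bitfield field, terminated by -1.  A
+    # hypothetical decoder for such a list (terminator already stripped, as
+    # in the read loop above):
+    #
+    #     def decode_layout(nums, fldnames):
+    #         total_size, total_align = nums[0], nums[1]
+    #         fields = {f: (off, sz) for f, off, sz
+    #                   in zip(fldnames, nums[2::2], nums[3::2])}
+    #         return total_size, total_align, fields
+    #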
+ + def _generate_gen_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_gen_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _loading_gen_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_gen_enum(tp, name, module, '') + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_gen_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_gen_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_gen_const(self, is_int, name, tp=None, category='const', + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + self.export_symbols.append(funcname) + if check_value is not None: + assert is_int + assert category == 'const' + prnt('int %s(char *out_error)' % funcname) + prnt('{') + self._check_int_constant_value(name, check_value) + prnt(' return 0;') + prnt('}') + elif is_int: + assert category == 'const' + prnt('int %s(long long *out_value)' % funcname) + prnt('{') + prnt(' *out_value = (long long)(%s);' % (name,)) + prnt(' return (%s) <= 0;' % (name,)) + prnt('}') + else: + assert tp is not None + assert check_value is None + if category == 'var': + ampersand = '&' + else: + ampersand = '' + extra = '' + if category == 'const' and isinstance(tp, model.StructOrUnion): + extra = 'const *' + ampersand = '&' + prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name)) + prnt('{') + prnt(' return (%s%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_gen_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_gen_const(is_int, name, tp) + + _loading_gen_constant = _loaded_noop + + def _load_constant(self, is_int, tp, name, module, check_value=None): + funcname = '_cffi_const_%s' % name + if check_value is not None: + assert is_int + self._load_known_int_constant(module, funcname) + value = check_value + elif is_int: + BType = self.ffi._typeof_locked("long long*")[0] + BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType) + negative = function(p) + value = int(p[0]) + if value < 0 and not negative: + BLongLong = self.ffi._typeof_locked("long long")[0] + value += (1 << (8*self.ffi.sizeof(BLongLong))) + else: + assert check_value is None + fntypeextra = '(*)(void)' + if isinstance(tp, model.StructOrUnion): + fntypeextra = '*' + fntypeextra + BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0] + function = module.load_function(BFunc, funcname) + value = function() + if isinstance(tp, model.StructOrUnion): + value = value[0] + return value + + def _loaded_gen_constant(self, tp, name, module, library): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + value = self._load_constant(is_int, tp, name, module) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # enums + + def _check_int_constant_value(self, name, value): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' sprintf(buf, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' sprintf(buf, "%%lu", 
(unsigned long)(%s));' % + name) + prnt(' sprintf(out_error, "%s has the real value %s, not %s",') + prnt(' "%s", buf, "%d");' % (name[:100], value)) + prnt(' return -1;') + prnt(' }') + + def _load_known_int_constant(self, module, funcname): + BType = self.ffi._typeof_locked("char[]")[0] + BFunc = self.ffi._typeof_locked("int(*)(char*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType, 256) + if function(p) < 0: + error = self.ffi.string(p) + if sys.version_info >= (3,): + error = str(error, 'utf-8') + raise VerificationError(error) + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def _generate_gen_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_gen_const(True, enumerator) + return + # + funcname = self._enum_funcname(prefix, name) + self.export_symbols.append(funcname) + prnt = self._prnt + prnt('int %s(char *out_error)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue) + prnt(' return 0;') + prnt('}') + prnt() + + def _loading_gen_enum(self, tp, name, module, prefix='enum'): + if tp.partial: + enumvalues = [self._load_constant(True, tp, enumerator, module) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + else: + funcname = self._enum_funcname(prefix, name) + self._load_known_int_constant(module, funcname) + + def _loaded_gen_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + type(library)._cffi_dir.append(enumerator) + + # ---------- + # macros: for now only for integers + + def _generate_gen_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_gen_const(True, name, check_value=check_value) + + _loading_gen_macro = _loaded_noop + + def _loaded_gen_macro(self, tp, name, module, library): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + value = self._load_constant(True, tp, name, module, + check_value=check_value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # global variables + + def _generate_gen_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + if tp.length_is_unknown(): + prnt = self._prnt + funcname = '_cffi_sizeof_%s' % (name,) + self.export_symbols.append(funcname) + prnt("size_t %s(void)" % funcname) + prnt("{") + prnt(" return sizeof(%s);" % (name,)) + prnt("}") + tp_ptr = model.PointerType(tp.item) + self._generate_gen_const(False, name, tp_ptr) + else: + tp_ptr = model.PointerType(tp) + self._generate_gen_const(False, name, tp_ptr, category='var') + + _loading_gen_variable = _loaded_noop + + def _loaded_gen_variable(self, tp, name, module, library): + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." 
is forbidden
+            if tp.length_is_unknown():
+                funcname = '_cffi_sizeof_%s' % (name,)
+                BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0]
+                function = module.load_function(BFunc, funcname)
+                size = function()
+                BItemType = self.ffi._get_cached_btype(tp.item)
+                length, rest = divmod(size, self.ffi.sizeof(BItemType))
+                if rest != 0:
+                    raise VerificationError(
+                        "bad size: %r does not seem to be an array of %s" %
+                        (name, tp.item))
+                tp = tp.resolve_length(length)
+            tp_ptr = model.PointerType(tp.item)
+            value = self._load_constant(False, tp_ptr, name, module)
+            # 'value' is a <cdata 'type (*)[]'> which we have to replace with
+            # a <cdata 'type[N]'> if the N is actually known
+            if tp.length is not None:
+                BArray = self.ffi._get_cached_btype(tp)
+                value = self.ffi.cast(BArray, value)
+            setattr(library, name, value)
+            type(library)._cffi_dir.append(name)
+            return
+        # remove ptr=<cdata 'int *'> from the library instance, and replace
+        # it by a property on the class, which reads/writes into ptr[0].
+        funcname = '_cffi_var_%s' % name
+        BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0]
+        function = module.load_function(BFunc, funcname)
+        ptr = function()
+        def getter(library):
+            return ptr[0]
+        def setter(library, value):
+            ptr[0] = value
+        setattr(type(library), name, property(getter, setter))
+        type(library)._cffi_dir.append(name)
+
+cffimod_header = r'''
+#include <stdio.h>
+#include <stddef.h>
+#include <stdarg.h>
+#include <errno.h>
+#include <sys/types.h>   /* XXX for ssize_t on some platforms */
+
+/* this block of #ifs should be kept exactly identical between
+   c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
+   and cffi/_cffi_include.h */
+#if defined(_MSC_VER)
+# include <malloc.h>   /* for alloca() */
+# if _MSC_VER < 1600   /* MSVC < 2010 */
+   typedef __int8 int8_t;
+   typedef __int16 int16_t;
+   typedef __int32 int32_t;
+   typedef __int64 int64_t;
+   typedef unsigned __int8 uint8_t;
+   typedef unsigned __int16 uint16_t;
+   typedef unsigned __int32 uint32_t;
+   typedef unsigned __int64 uint64_t;
+   typedef __int8 int_least8_t;
+   typedef __int16 int_least16_t;
+   typedef __int32 int_least32_t;
+   typedef __int64 int_least64_t;
+   typedef unsigned __int8 uint_least8_t;
+   typedef unsigned __int16 uint_least16_t;
+   typedef unsigned __int32 uint_least32_t;
+   typedef unsigned __int64 uint_least64_t;
+   typedef __int8 int_fast8_t;
+   typedef __int16 int_fast16_t;
+   typedef __int32 int_fast32_t;
+   typedef __int64 int_fast64_t;
+   typedef unsigned __int8 uint_fast8_t;
+   typedef unsigned __int16 uint_fast16_t;
+   typedef unsigned __int32 uint_fast32_t;
+   typedef unsigned __int64 uint_fast64_t;
+   typedef __int64 intmax_t;
+   typedef unsigned __int64 uintmax_t;
+# else
+#  include <stdint.h>
+# endif
+# if _MSC_VER < 1800   /* MSVC < 2013 */
+#  ifndef __cplusplus
+    typedef unsigned char _Bool;
+#  endif
+# endif
+# define _cffi_float_complex_t   _Fcomplex    /* include <complex.h> for it */
+# define _cffi_double_complex_t  _Dcomplex    /* include <complex.h> for it */
+#else
+# include <stdint.h>
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
+#  include <alloca.h>
+# endif
+# define _cffi_float_complex_t   float _Complex
+# define _cffi_double_complex_t  double _Complex
+#endif
+'''
diff --git a/venv/lib/python3.12/site-packages/cffi/verifier.py b/venv/lib/python3.12/site-packages/cffi/verifier.py
new file mode 100644
index 00000000..e392a2b7
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/cffi/verifier.py
@@ -0,0 +1,306 @@
+#
+# DEPRECATED: implementation for ffi.verify()
+#
+import sys, os, binascii, shutil, io
+from . import __version_verifier_modules__
+from . 
import ffiplatform +from .error import VerificationError + +if sys.version_info >= (3, 3): + import importlib.machinery + def _extension_suffixes(): + return importlib.machinery.EXTENSION_SUFFIXES[:] +else: + import imp + def _extension_suffixes(): + return [suffix for suffix, _, type in imp.get_suffixes() + if type == imp.C_EXTENSION] + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + + +class Verifier(object): + + def __init__(self, ffi, preamble, tmpdir=None, modulename=None, + ext_package=None, tag='', force_generic_engine=False, + source_extension='.c', flags=None, relative_to=None, **kwds): + if ffi._parser._uses_new_feature: + raise VerificationError( + "feature not supported with ffi.verify(), but only " + "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,)) + self.ffi = ffi + self.preamble = preamble + if not modulename: + flattened_kwds = ffiplatform.flatten(kwds) + vengine_class = _locate_engine_class(ffi, force_generic_engine) + self._vengine = vengine_class(self) + self._vengine.patch_extension_kwds(kwds) + self.flags = flags + self.kwds = self.make_relative_to(kwds, relative_to) + # + if modulename: + if tag: + raise TypeError("can't specify both 'modulename' and 'tag'") + else: + key = '\x00'.join(['%d.%d' % sys.version_info[:2], + __version_verifier_modules__, + preamble, flattened_kwds] + + ffi._cdefsources) + if sys.version_info >= (3,): + key = key.encode('utf-8') + k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) + k1 = k1.lstrip('0x').rstrip('L') + k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff) + k2 = k2.lstrip('0').rstrip('L') + modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key, + k1, k2) + suffix = _get_so_suffixes()[0] + self.tmpdir = tmpdir or _caller_dir_pycache() + self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension) + self.modulefilename = os.path.join(self.tmpdir, modulename + suffix) + self.ext_package = ext_package + self._has_source = False + self._has_module = False + + def write_source(self, file=None): + """Write the C source code. It is produced in 'self.sourcefilename', + which can be tweaked beforehand.""" + with self.ffi._lock: + if self._has_source and file is None: + raise VerificationError( + "source code already written") + self._write_source(file) + + def compile_module(self): + """Write the C source code (if not done already) and compile it. + This produces a dynamic link library in 'self.modulefilename'.""" + with self.ffi._lock: + if self._has_module: + raise VerificationError("module already compiled") + if not self._has_source: + self._write_source() + self._compile_module() + + def load_library(self): + """Get a C module from this Verifier instance. + Returns an instance of a FFILibrary class that behaves like the + objects returned by ffi.dlopen(), but that delegates all + operations to the C module. If necessary, the C code is written + and compiled first. 
+ """ + with self.ffi._lock: + if not self._has_module: + self._locate_module() + if not self._has_module: + if not self._has_source: + self._write_source() + self._compile_module() + return self._load_library() + + def get_module_name(self): + basename = os.path.basename(self.modulefilename) + # kill both the .so extension and the other .'s, as introduced + # by Python 3: 'basename.cpython-33m.so' + basename = basename.split('.', 1)[0] + # and the _d added in Python 2 debug builds --- but try to be + # conservative and not kill a legitimate _d + if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'): + basename = basename[:-2] + return basename + + def get_extension(self): + if not self._has_source: + with self.ffi._lock: + if not self._has_source: + self._write_source() + sourcename = ffiplatform.maybe_relative_path(self.sourcefilename) + modname = self.get_module_name() + return ffiplatform.get_extension(sourcename, modname, **self.kwds) + + def generates_python_module(self): + return self._vengine._gen_python_module + + def make_relative_to(self, kwds, relative_to): + if relative_to and os.path.dirname(relative_to): + dirname = os.path.dirname(relative_to) + kwds = kwds.copy() + for key in ffiplatform.LIST_OF_FILE_NAMES: + if key in kwds: + lst = kwds[key] + if not isinstance(lst, (list, tuple)): + raise TypeError("keyword '%s' should be a list or tuple" + % (key,)) + lst = [os.path.join(dirname, fn) for fn in lst] + kwds[key] = lst + return kwds + + # ---------- + + def _locate_module(self): + if not os.path.isfile(self.modulefilename): + if self.ext_package: + try: + pkg = __import__(self.ext_package, None, None, ['__doc__']) + except ImportError: + return # cannot import the package itself, give up + # (e.g. it might be called differently before installation) + path = pkg.__path__ + else: + path = None + filename = self._vengine.find_module(self.get_module_name(), path, + _get_so_suffixes()) + if filename is None: + return + self.modulefilename = filename + self._vengine.collect_types() + self._has_module = True + + def _write_source_to(self, file): + self._vengine._f = file + try: + self._vengine.write_source_to_f() + finally: + del self._vengine._f + + def _write_source(self, file=None): + if file is not None: + self._write_source_to(file) + else: + # Write our source file to an in memory file. 
+ f = NativeIO() + self._write_source_to(f) + source_data = f.getvalue() + + # Determine if this matches the current file + if os.path.exists(self.sourcefilename): + with open(self.sourcefilename, "r") as fp: + needs_written = not (fp.read() == source_data) + else: + needs_written = True + + # Actually write the file out if it doesn't match + if needs_written: + _ensure_dir(self.sourcefilename) + with open(self.sourcefilename, "w") as fp: + fp.write(source_data) + + # Set this flag + self._has_source = True + + def _compile_module(self): + # compile this C source + tmpdir = os.path.dirname(self.sourcefilename) + outputfilename = ffiplatform.compile(tmpdir, self.get_extension()) + try: + same = ffiplatform.samefile(outputfilename, self.modulefilename) + except OSError: + same = False + if not same: + _ensure_dir(self.modulefilename) + shutil.move(outputfilename, self.modulefilename) + self._has_module = True + + def _load_library(self): + assert self._has_module + if self.flags is not None: + return self._vengine.load_library(self.flags) + else: + return self._vengine.load_library() + +# ____________________________________________________________ + +_FORCE_GENERIC_ENGINE = False # for tests + +def _locate_engine_class(ffi, force_generic_engine): + if _FORCE_GENERIC_ENGINE: + force_generic_engine = True + if not force_generic_engine: + if '__pypy__' in sys.builtin_module_names: + force_generic_engine = True + else: + try: + import _cffi_backend + except ImportError: + _cffi_backend = '?' + if ffi._backend is not _cffi_backend: + force_generic_engine = True + if force_generic_engine: + from . import vengine_gen + return vengine_gen.VGenericEngine + else: + from . import vengine_cpy + return vengine_cpy.VCPythonEngine + +# ____________________________________________________________ + +_TMPDIR = None + +def _caller_dir_pycache(): + if _TMPDIR: + return _TMPDIR + result = os.environ.get('CFFI_TMPDIR') + if result: + return result + filename = sys._getframe(2).f_code.co_filename + return os.path.abspath(os.path.join(os.path.dirname(filename), + '__pycache__')) + +def set_tmpdir(dirname): + """Set the temporary directory to use instead of __pycache__.""" + global _TMPDIR + _TMPDIR = dirname + +def cleanup_tmpdir(tmpdir=None, keep_so=False): + """Clean up the temporary directory by removing all files in it + called `_cffi_*.{c,so}` as well as the `build` subdirectory.""" + tmpdir = tmpdir or _caller_dir_pycache() + try: + filelist = os.listdir(tmpdir) + except OSError: + return + if keep_so: + suffix = '.c' # only remove .c files + else: + suffix = _get_so_suffixes()[0].lower() + for fn in filelist: + if fn.lower().startswith('_cffi_') and ( + fn.lower().endswith(suffix) or fn.lower().endswith('.c')): + try: + os.unlink(os.path.join(tmpdir, fn)) + except OSError: + pass + clean_dir = [os.path.join(tmpdir, 'build')] + for dir in clean_dir: + try: + for fn in os.listdir(dir): + fn = os.path.join(dir, fn) + if os.path.isdir(fn): + clean_dir.append(fn) + else: + os.unlink(fn) + except OSError: + pass + +def _get_so_suffixes(): + suffixes = _extension_suffixes() + if not suffixes: + # bah, no C_EXTENSION available. 
Occurs on pypy without cpyext + if sys.platform == 'win32': + suffixes = [".pyd"] + else: + suffixes = [".so"] + + return suffixes + +def _ensure_dir(filename): + dirname = os.path.dirname(filename) + if dirname and not os.path.isdir(dirname): + os.makedirs(dirname) diff --git a/venv/lib/python3.12/site-packages/werkzeug-3.1.3.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/INSTALLER similarity index 100% rename from venv/lib/python3.12/site-packages/werkzeug-3.1.3.dist-info/INSTALLER rename to venv/lib/python3.12/site-packages/click-8.3.1.dist-info/INSTALLER diff --git a/venv/lib/python3.12/site-packages/click-8.3.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/METADATA similarity index 99% rename from venv/lib/python3.12/site-packages/click-8.3.0.dist-info/METADATA rename to venv/lib/python3.12/site-packages/click-8.3.1.dist-info/METADATA index 534eb572..3f433afb 100644 --- a/venv/lib/python3.12/site-packages/click-8.3.0.dist-info/METADATA +++ b/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 2.4 Name: click -Version: 8.3.0 +Version: 8.3.1 Summary: Composable command line interface toolkit Maintainer-email: Pallets Requires-Python: >=3.10 diff --git a/venv/lib/python3.12/site-packages/click-8.3.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/RECORD similarity index 77% rename from venv/lib/python3.12/site-packages/click-8.3.0.dist-info/RECORD rename to venv/lib/python3.12/site-packages/click-8.3.1.dist-info/RECORD index dc3c2be0..77e5c989 100644 --- a/venv/lib/python3.12/site-packages/click-8.3.0.dist-info/RECORD +++ b/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/RECORD @@ -1,8 +1,8 @@ -click-8.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -click-8.3.0.dist-info/METADATA,sha256=P6vpEHZ_MLBt4SO2eB-QaadcOdiznkzaZtJImRo7_V4,2621 -click-8.3.0.dist-info/RECORD,, -click-8.3.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82 -click-8.3.0.dist-info/licenses/LICENSE.txt,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475 +click-8.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +click-8.3.1.dist-info/METADATA,sha256=XZeBrMAE0ghTE88SjfrSDuSyNCpBPplxJR1tbwD9oZg,2621 +click-8.3.1.dist-info/RECORD,, +click-8.3.1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82 +click-8.3.1.dist-info/licenses/LICENSE.txt,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475 click/__init__.py,sha256=6YyS1aeyknZ0LYweWozNZy0A9nZ_11wmYIhv3cbQrYo,4473 click/__pycache__/__init__.cpython-312.pyc,, click/__pycache__/_compat.cpython-312.pyc,, @@ -22,11 +22,11 @@ click/__pycache__/testing.cpython-312.pyc,, click/__pycache__/types.cpython-312.pyc,, click/__pycache__/utils.cpython-312.pyc,, click/_compat.py,sha256=v3xBZkFbvA1BXPRkFfBJc6-pIwPI7345m-kQEnpVAs4,18693 -click/_termui_impl.py,sha256=ktpAHyJtNkhyR-x64CQFD6xJQI11fTA3qg2AV3iCToU,26799 +click/_termui_impl.py,sha256=rgCb3On8X5A4200rA5L6i13u5iapmFer7sru57Jy6zA,27093 click/_textwrap.py,sha256=BOae0RQ6vg3FkNgSJyOoGzG1meGMxJ_ukWVZKx_v-0o,1400 click/_utils.py,sha256=kZwtTf5gMuCilJJceS2iTCvRvCY-0aN5rJq8gKw7p8g,943 click/_winconsole.py,sha256=_vxUuUaxwBhoR0vUWCNuHY8VUefiMdCIyU2SXPqoF-A,8465 -click/core.py,sha256=1A5T8UoAXklIGPTJ83_DJbVi35ehtJS2FTkP_wQ7es0,128855 +click/core.py,sha256=U6Bfxt8GkjNDqyJ0HqXvluJHtyZ4sY5USAvM1Cdq7mQ,132105 click/decorators.py,sha256=5P7abhJtAQYp_KHgjUvhMv464ERwOzrv2enNknlwHyQ,18461 
click/exceptions.py,sha256=8utf8w6V5hJXMnO_ic1FNrtbwuEn1NUu1aDwV8UqnG4,9954 click/formatting.py,sha256=RVfwwr0rwWNpgGr8NaHodPzkIr7_tUyVh_nDdanLMNc,9730 @@ -34,7 +34,7 @@ click/globals.py,sha256=gM-Nh6A4M0HB_SgkaF5M4ncGGMDHc_flHXu9_oh4GEU,1923 click/parser.py,sha256=Q31pH0FlQZEq-UXE_ABRzlygEfvxPTuZbWNh4xfXmzw,19010 click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 click/shell_completion.py,sha256=Cc4GQUFuWpfQBa9sF5qXeeYI7n3tI_1k6ZdSn4BZbT0,20994 -click/termui.py,sha256=vAYrKC2a7f_NfEIhAThEVYfa__ib5XQbTSCGtJlABRA,30847 +click/termui.py,sha256=hqCEjNndU-nzW08nRAkBaVgfZp_FdCA9KxfIWlKYaMc,31037 click/testing.py,sha256=EERbzcl1br0mW0qBS9EqkknfNfXB9WQEW0ELIpkvuSs,19102 click/types.py,sha256=ek54BNSFwPKsqtfT7jsqcc4WHui8AIFVMKM4oVZIXhc,39927 click/utils.py,sha256=gCUoewdAhA-QLBUUHxrLh4uj6m7T1WjZZMNPvR0I7YA,20257 diff --git a/venv/lib/python3.12/site-packages/click-8.3.0.dist-info/WHEEL b/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/WHEEL similarity index 100% rename from venv/lib/python3.12/site-packages/click-8.3.0.dist-info/WHEEL rename to venv/lib/python3.12/site-packages/click-8.3.1.dist-info/WHEEL diff --git a/venv/lib/python3.12/site-packages/click-8.3.0.dist-info/licenses/LICENSE.txt b/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/licenses/LICENSE.txt similarity index 100% rename from venv/lib/python3.12/site-packages/click-8.3.0.dist-info/licenses/LICENSE.txt rename to venv/lib/python3.12/site-packages/click-8.3.1.dist-info/licenses/LICENSE.txt diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/__init__.cpython-312.pyc index 1b91aa25..963d814d 100644 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/__init__.cpython-312.pyc and b/venv/lib/python3.12/site-packages/click/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/_compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/_compat.cpython-312.pyc index 89f7289a..61b5afb2 100644 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/_compat.cpython-312.pyc and b/venv/lib/python3.12/site-packages/click/__pycache__/_compat.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/_termui_impl.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/_termui_impl.cpython-312.pyc index 3732aa3e..3fd159b2 100644 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/_termui_impl.cpython-312.pyc and b/venv/lib/python3.12/site-packages/click/__pycache__/_termui_impl.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/_textwrap.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/_textwrap.cpython-312.pyc index dfdf4f28..63c548e3 100644 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/_textwrap.cpython-312.pyc and b/venv/lib/python3.12/site-packages/click/__pycache__/_textwrap.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/_utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/_utils.cpython-312.pyc index 90c8bb16..0408a7ab 100644 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/_utils.cpython-312.pyc and b/venv/lib/python3.12/site-packages/click/__pycache__/_utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/_winconsole.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/click/__pycache__/_winconsole.cpython-312.pyc index 9e6279f5..aaf2a400 100644 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/_winconsole.cpython-312.pyc and b/venv/lib/python3.12/site-packages/click/__pycache__/_winconsole.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/core.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/core.cpython-312.pyc index c4f41946..b41735c2 100644 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/core.cpython-312.pyc and b/venv/lib/python3.12/site-packages/click/__pycache__/core.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/decorators.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/decorators.cpython-312.pyc index c6a9cac4..afefcfb9 100644 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/decorators.cpython-312.pyc and b/venv/lib/python3.12/site-packages/click/__pycache__/decorators.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/exceptions.cpython-312.pyc index d811fc86..49bbff03 100644 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/exceptions.cpython-312.pyc and b/venv/lib/python3.12/site-packages/click/__pycache__/exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/formatting.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/formatting.cpython-312.pyc index 90ead78d..5cfd1af6 100644 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/formatting.cpython-312.pyc and b/venv/lib/python3.12/site-packages/click/__pycache__/formatting.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/globals.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/globals.cpython-312.pyc index c2ffcc84..5f008bae 100644 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/globals.cpython-312.pyc and b/venv/lib/python3.12/site-packages/click/__pycache__/globals.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/parser.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/parser.cpython-312.pyc index a1b5350a..16870ef9 100644 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/parser.cpython-312.pyc and b/venv/lib/python3.12/site-packages/click/__pycache__/parser.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/shell_completion.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/shell_completion.cpython-312.pyc index 08a659ec..176661e2 100644 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/shell_completion.cpython-312.pyc and b/venv/lib/python3.12/site-packages/click/__pycache__/shell_completion.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/termui.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/termui.cpython-312.pyc index 39cacf5d..8abb5461 100644 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/termui.cpython-312.pyc and b/venv/lib/python3.12/site-packages/click/__pycache__/termui.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/testing.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/testing.cpython-312.pyc 
index 7cadcea6..ee3bb629 100644 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/testing.cpython-312.pyc and b/venv/lib/python3.12/site-packages/click/__pycache__/testing.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/types.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/types.cpython-312.pyc index e698fd2d..d444677c 100644 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/types.cpython-312.pyc and b/venv/lib/python3.12/site-packages/click/__pycache__/types.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/utils.cpython-312.pyc index 43365991..ca8edba9 100644 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/utils.cpython-312.pyc and b/venv/lib/python3.12/site-packages/click/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/_termui_impl.py b/venv/lib/python3.12/site-packages/click/_termui_impl.py index 47f87b86..ee8225c4 100644 --- a/venv/lib/python3.12/site-packages/click/_termui_impl.py +++ b/venv/lib/python3.12/site-packages/click/_termui_impl.py @@ -429,8 +429,11 @@ def _pipepager( cmd_filepath = shutil.which(cmd) if not cmd_filepath: return False - # Resolves symlinks and produces a normalized absolute path string. - cmd_path = Path(cmd_filepath).resolve() + + # Produces a normalized absolute path string. + # multi-call binaries such as busybox derive their identity from the symlink + # less -> busybox. resolve() causes them to misbehave. (eg. less becomes busybox) + cmd_path = Path(cmd_filepath).absolute() cmd_name = cmd_path.name import subprocess @@ -450,7 +453,7 @@ def _pipepager( c = subprocess.Popen( [str(cmd_path)] + cmd_params, - shell=True, + shell=False, stdin=subprocess.PIPE, env=env, errors="replace", @@ -520,8 +523,10 @@ def _tempfilepager( cmd_filepath = shutil.which(cmd) if not cmd_filepath: return False - # Resolves symlinks and produces a normalized absolute path string. - cmd_path = Path(cmd_filepath).resolve() + # Produces a normalized absolute path string. + # multi-call binaries such as busybox derive their identity from the symlink + # less -> busybox. resolve() causes them to misbehave. (eg. less becomes busybox) + cmd_path = Path(cmd_filepath).absolute() import subprocess import tempfile diff --git a/venv/lib/python3.12/site-packages/click/core.py b/venv/lib/python3.12/site-packages/click/core.py index ff2f74ad..57f549c7 100644 --- a/venv/lib/python3.12/site-packages/click/core.py +++ b/venv/lib/python3.12/site-packages/click/core.py @@ -799,8 +799,18 @@ class Context: for param in other_cmd.params: if param.name not in kwargs and param.expose_value: + default_value = param.get_default(ctx) + # We explicitly hide the :attr:`UNSET` value to the user, as we + # choose to make it an implementation detail. And because ``invoke`` + # has been designed as part of Click public API, we return ``None`` + # instead. 
Refs: + # https://github.com/pallets/click/issues/3066 + # https://github.com/pallets/click/issues/3065 + # https://github.com/pallets/click/pull/3068 + if default_value is UNSET: + default_value = None kwargs[param.name] = param.type_cast_value( # type: ignore - ctx, param.get_default(ctx) + ctx, default_value ) # Track all kwargs as params, so that forward() will pass @@ -1216,6 +1226,19 @@ class Command: for param in iter_params_for_processing(param_order, self.get_params(ctx)): _, args = param.handle_parse_result(ctx, opts, args) + # We now have all parameters' values into `ctx.params`, but the data may contain + # the `UNSET` sentinel. + # Convert `UNSET` to `None` to ensure that the user doesn't see `UNSET`. + # + # Waiting until after the initial parse to convert allows us to treat `UNSET` + # more like a missing value when multiple params use the same name. + # Refs: + # https://github.com/pallets/click/issues/3071 + # https://github.com/pallets/click/pull/3079 + for name, value in ctx.params.items(): + if value is UNSET: + ctx.params[name] = None + if args and not ctx.allow_extra_args and not ctx.resilient_parsing: ctx.fail( ngettext( @@ -2144,7 +2167,7 @@ class Parameter: self.nargs = nargs self.multiple = multiple self.expose_value = expose_value - self.default = default + self.default: t.Any | t.Callable[[], t.Any] | None = default self.is_eager = is_eager self.metavar = metavar self.envvar = envvar @@ -2315,7 +2338,7 @@ class Parameter: """Convert and validate a value against the parameter's :attr:`type`, :attr:`multiple`, and :attr:`nargs`. """ - if value in (None, UNSET): + if value is None: if self.multiple or self.nargs == -1: return () else: @@ -2398,7 +2421,16 @@ class Parameter: :meta private: """ - value = self.type_cast_value(ctx, value) + # shelter `type_cast_value` from ever seeing an `UNSET` value by handling the + # cases in which `UNSET` gets special treatment explicitly at this layer + # + # Refs: + # https://github.com/pallets/click/issues/3069 + if value is UNSET: + if self.multiple or self.nargs == -1: + value = () + else: + value = self.type_cast_value(ctx, value) if self.required and self.value_is_missing(value): raise MissingParameter(ctx=ctx, param=self) @@ -2408,7 +2440,37 @@ class Parameter: # to None. if value is UNSET: value = None - value = self.callback(ctx, self, value) + + # Search for parameters with UNSET values in the context. + unset_keys = {k: None for k, v in ctx.params.items() if v is UNSET} + # No UNSET values, call the callback as usual. + if not unset_keys: + value = self.callback(ctx, self, value) + + # Legacy case: provide a temporarily manipulated context to the callback + # to hide UNSET values as None. + # + # Refs: + # https://github.com/pallets/click/issues/3136 + # https://github.com/pallets/click/pull/3137 + else: + # Add another layer to the context stack to clearly hint that the + # context is temporarily modified. + with ctx: + # Update the context parameters to replace UNSET with None. + ctx.params.update(unset_keys) + # Feed these fake context parameters to the callback. + value = self.callback(ctx, self, value) + # Restore the UNSET values in the context parameters. + ctx.params.update( + { + k: UNSET + for k in unset_keys + # Only restore keys that are present and still None, in case + # the callback modified other parameters. 
+ if k in ctx.params and ctx.params[k] is None + } + ) return value @@ -2528,17 +2590,14 @@ class Parameter: # We skip adding the value if it was previously set by another parameter # targeting the same variable name. This prevents parameters competing for # the same name to override each other. - and self.name not in ctx.params + and (self.name not in ctx.params or ctx.params[self.name] is UNSET) ): # Click is logically enforcing that the name is None if the parameter is # not to be exposed. We still assert it here to please the type checker. assert self.name is not None, ( f"{self!r} parameter's name should not be None when exposing value." ) - # Normalize UNSET values to None, as we're about to pass them to the - # command function and move them to the pure-Python realm of user-written - # code. - ctx.params[self.name] = value if value is not UNSET else None + ctx.params[self.name] = value return value, args @@ -2733,7 +2792,7 @@ class Option(Parameter): if type is None: # A flag without a flag_value is a boolean flag. if flag_value is UNSET: - self.type = types.BoolParamType() + self.type: types.ParamType = types.BoolParamType() # If the flag value is a boolean, use BoolParamType. elif isinstance(flag_value, bool): self.type = types.BoolParamType() @@ -3236,13 +3295,22 @@ class Option(Parameter): return value, source - def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any: - if self.is_flag and not self.required: - if value is UNSET: - if self.is_bool_flag: - # If the flag is a boolean flag, we return False if it is not set. - value = False - return super().type_cast_value(ctx, value) + def process_value(self, ctx: Context, value: t.Any) -> t.Any: + # process_value has to be overridden on Options in order to capture + # `value == UNSET` cases before `type_cast_value()` gets called. + # + # Refs: + # https://github.com/pallets/click/issues/3069 + if self.is_flag and not self.required and self.is_bool_flag and value is UNSET: + value = False + + if self.callback is not None: + value = self.callback(ctx, self, value) + + return value + + # in the normal case, rely on Parameter.process_value + return super().process_value(ctx, value) class Argument(Parameter): diff --git a/venv/lib/python3.12/site-packages/click/termui.py b/venv/lib/python3.12/site-packages/click/termui.py index dcbb2221..2e98a077 100644 --- a/venv/lib/python3.12/site-packages/click/termui.py +++ b/venv/lib/python3.12/site-packages/click/termui.py @@ -119,6 +119,9 @@ def prompt( show_choices is true and text is "Group by" then the prompt will be "Group by (day, week): ". + .. versionchanged:: 8.3.1 + A space is no longer appended to the prompt. + .. versionadded:: 8.0 ``confirmation_prompt`` can be a custom string. @@ -138,10 +141,10 @@ def prompt( try: # Write the prompt separately so that we get nice # coloring through colorama on Windows - echo(text.rstrip(" "), nl=False, err=err) - # Echo a space to stdout to work around an issue where + echo(text[:-1], nl=False, err=err) + # Echo the last character to stdout to work around an issue where # readline causes backspace to clear the whole line. - return f(" ") + return f(text[-1:]) except (KeyboardInterrupt, EOFError): # getpass doesn't print a newline if the user aborts input with ^C. # Allegedly this behavior is inherited from getpass(3). @@ -214,6 +217,9 @@ def confirm( :param err: if set to true the file defaults to ``stderr`` instead of ``stdout``, the same as with echo. + .. versionchanged:: 8.3.1 + A space is no longer appended to the prompt. + .. 
diff --git a/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/METADATA b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/METADATA
new file mode 100644
index 00000000..7b07ee7b
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/METADATA
@@ -0,0 +1,139 @@
+Metadata-Version: 2.4
+Name: cryptography
+Version: 46.0.3
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: POSIX :: BSD
+Classifier: Operating System :: POSIX :: Linux
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Programming Language :: Python :: Free Threading :: 3 - Stable
+Classifier: Topic :: Security :: Cryptography
+Requires-Dist: cffi>=1.14 ; python_full_version == '3.8.*' and platform_python_implementation != 'PyPy'
+Requires-Dist: cffi>=2.0.0 ; python_full_version >= '3.9' and platform_python_implementation != 'PyPy'
+Requires-Dist: typing-extensions>=4.13.2 ; python_full_version < '3.11'
+Requires-Dist: bcrypt>=3.1.5 ; extra == 'ssh'
+Requires-Dist: nox[uv]>=2024.4.15 ; extra == 'nox'
+Requires-Dist: cryptography-vectors==46.0.3 ; extra == 'test'
+Requires-Dist: pytest>=7.4.0 ; extra == 'test'
+Requires-Dist: pytest-benchmark>=4.0 ; extra == 'test'
+Requires-Dist: pytest-cov>=2.10.1 ; extra == 'test'
+Requires-Dist: pytest-xdist>=3.5.0 ; extra == 'test'
+Requires-Dist: pretend>=0.7 ; extra == 'test'
+Requires-Dist: certifi>=2024 ; extra == 'test'
+Requires-Dist: pytest-randomly ; extra == 'test-randomorder'
+Requires-Dist: sphinx>=5.3.0 ; extra == 'docs'
+Requires-Dist: sphinx-rtd-theme>=3.0.0 ; extra == 'docs'
+Requires-Dist: sphinx-inline-tabs ; extra == 'docs'
+Requires-Dist: pyenchant>=3 ; extra == 'docstest'
+Requires-Dist: readme-renderer>=30.0 ; extra == 'docstest' +Requires-Dist: sphinxcontrib-spelling>=7.3.1 ; extra == 'docstest' +Requires-Dist: build>=1.0.0 ; extra == 'sdist' +Requires-Dist: ruff>=0.11.11 ; extra == 'pep8test' +Requires-Dist: mypy>=1.14 ; extra == 'pep8test' +Requires-Dist: check-sdist ; extra == 'pep8test' +Requires-Dist: click>=8.0.1 ; extra == 'pep8test' +Provides-Extra: ssh +Provides-Extra: nox +Provides-Extra: test +Provides-Extra: test-randomorder +Provides-Extra: docs +Provides-Extra: docstest +Provides-Extra: sdist +Provides-Extra: pep8test +License-File: LICENSE +License-File: LICENSE.APACHE +License-File: LICENSE.BSD +Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers. +Author-email: The Python Cryptographic Authority and individual contributors +License-Expression: Apache-2.0 OR BSD-3-Clause +Requires-Python: >=3.8, !=3.9.0, !=3.9.1 +Description-Content-Type: text/x-rst; charset=UTF-8 +Project-URL: homepage, https://github.com/pyca/cryptography +Project-URL: documentation, https://cryptography.io/ +Project-URL: source, https://github.com/pyca/cryptography/ +Project-URL: issues, https://github.com/pyca/cryptography/issues +Project-URL: changelog, https://cryptography.io/en/latest/changelog/ + +pyca/cryptography +================= + +.. image:: https://img.shields.io/pypi/v/cryptography.svg + :target: https://pypi.org/project/cryptography/ + :alt: Latest Version + +.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest + :target: https://cryptography.io + :alt: Latest Docs + +.. image:: https://github.com/pyca/cryptography/actions/workflows/ci.yml/badge.svg + :target: https://github.com/pyca/cryptography/actions/workflows/ci.yml?query=branch%3Amain + +``cryptography`` is a package which provides cryptographic recipes and +primitives to Python developers. Our goal is for it to be your "cryptographic +standard library". It supports Python 3.8+ and PyPy3 7.3.11+. + +``cryptography`` includes both high level recipes and low level interfaces to +common cryptographic algorithms such as symmetric ciphers, message digests, and +key derivation functions. For example, to encrypt something with +``cryptography``'s high level symmetric encryption recipe: + +.. code-block:: pycon + + >>> from cryptography.fernet import Fernet + >>> # Put this somewhere safe! + >>> key = Fernet.generate_key() + >>> f = Fernet(key) + >>> token = f.encrypt(b"A really secret message. Not for prying eyes.") + >>> token + b'...' + >>> f.decrypt(token) + b'A really secret message. Not for prying eyes.' + +You can find more information in the `documentation`_. + +You can install ``cryptography`` with: + +.. code-block:: console + + $ pip install cryptography + +For full details see `the installation documentation`_. + +Discussion +~~~~~~~~~~ + +If you run into bugs, you can file them in our `issue tracker`_. + +We maintain a `cryptography-dev`_ mailing list for development discussion. + +You can also join ``#pyca`` on ``irc.libera.chat`` to ask questions or get +involved. + +Security +~~~~~~~~ + +Need to report a security issue? Please consult our `security reporting`_ +documentation. + + +.. _`documentation`: https://cryptography.io/ +.. _`the installation documentation`: https://cryptography.io/en/latest/installation/ +.. _`issue tracker`: https://github.com/pyca/cryptography/issues +.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev +.. 
_`security reporting`: https://cryptography.io/en/latest/security/ + diff --git a/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/RECORD b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/RECORD new file mode 100644 index 00000000..9099b5bc --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/RECORD @@ -0,0 +1,180 @@ +cryptography-46.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cryptography-46.0.3.dist-info/METADATA,sha256=bx2LyCEmOVUC8FH5hsGEZewWPiZoIIYTq0hM9mu9r4s,5748 +cryptography-46.0.3.dist-info/RECORD,, +cryptography-46.0.3.dist-info/WHEEL,sha256=jkxrJemT4jZpYSr-u9xPalWqoow8benNmiXfjKXLlJw,108 +cryptography-46.0.3.dist-info/licenses/LICENSE,sha256=Pgx8CRqUi4JTO6mP18u0BDLW8amsv4X1ki0vmak65rs,197 +cryptography-46.0.3.dist-info/licenses/LICENSE.APACHE,sha256=qsc7MUj20dcRHbyjIJn2jSbGRMaBOuHk8F9leaomY_4,11360 +cryptography-46.0.3.dist-info/licenses/LICENSE.BSD,sha256=YCxMdILeZHndLpeTzaJ15eY9dz2s0eymiSMqtwCPtPs,1532 +cryptography/__about__.py,sha256=QCLxNH_Abbygdc9RQGpUmrK14Wp3Cl_SEiB2byLwyxo,445 +cryptography/__init__.py,sha256=mthuUrTd4FROCpUYrTIqhjz6s6T9djAZrV7nZ1oMm2o,364 +cryptography/__pycache__/__about__.cpython-312.pyc,, +cryptography/__pycache__/__init__.cpython-312.pyc,, +cryptography/__pycache__/exceptions.cpython-312.pyc,, +cryptography/__pycache__/fernet.cpython-312.pyc,, +cryptography/__pycache__/utils.cpython-312.pyc,, +cryptography/exceptions.py,sha256=835EWILc2fwxw-gyFMriciC2SqhViETB10LBSytnDIc,1087 +cryptography/fernet.py,sha256=3Cvxkh0KJSbX8HbnCHu4wfCW7U0GgfUA3v_qQ8a8iWc,6963 +cryptography/hazmat/__init__.py,sha256=5IwrLWrVp0AjEr_4FdWG_V057NSJGY_W4egNNsuct0g,455 +cryptography/hazmat/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/__pycache__/_oid.cpython-312.pyc,, +cryptography/hazmat/_oid.py,sha256=p8ThjwJB56Ci_rAIrjyJ1f8VjgD6e39es2dh8JIUBOw,17240 +cryptography/hazmat/asn1/__init__.py,sha256=hS_EWx3wVvZzfbCcNV8hzcDnyMM8H-BhIoS1TipUosk,293 +cryptography/hazmat/asn1/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/asn1/__pycache__/asn1.cpython-312.pyc,, +cryptography/hazmat/asn1/asn1.py,sha256=eMEThEXa19LQjcyVofgHsW6tsZnjp3ddH7bWkkcxfLM,3860 +cryptography/hazmat/backends/__init__.py,sha256=O5jvKFQdZnXhKeqJ-HtulaEL9Ni7mr1mDzZY5kHlYhI,361 +cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/backends/openssl/__init__.py,sha256=p3jmJfnCag9iE5sdMrN6VvVEu55u46xaS_IjoI0SrmA,305 +cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc,, +cryptography/hazmat/backends/openssl/backend.py,sha256=tV5AxBoFJ2GfA0DMWSY-0TxQJrpQoexzI9R4Kybb--4,10215 +cryptography/hazmat/bindings/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/bindings/_rust.abi3.so,sha256=4bUN0J2p_ZQMdgmAc9eL0VMj_lgbTsHUmX4doekVIJ4,12955672 +cryptography/hazmat/bindings/_rust/__init__.pyi,sha256=KhqLhXFPArPzzJ7DYO9Fl8FoXB_BagAd_r4Dm_Ze9Xo,1257 +cryptography/hazmat/bindings/_rust/_openssl.pyi,sha256=mpNJLuYLbCVrd5i33FBTmWwL_55Dw7JPkSLlSX9Q7oI,230 +cryptography/hazmat/bindings/_rust/asn1.pyi,sha256=BrGjC8J6nwuS-r3EVcdXJB8ndotfY9mbQYOfpbPG0HA,354 +cryptography/hazmat/bindings/_rust/declarative_asn1.pyi,sha256=2ECFmYue1EPkHEE2Bm7aLwkjB0mSUTpr23v9MN4pri4,892 +cryptography/hazmat/bindings/_rust/exceptions.pyi,sha256=exXr2xw_0pB1kk93cYbM3MohbzoUkjOms1ZMUi0uQZE,640 
+cryptography/hazmat/bindings/_rust/ocsp.pyi,sha256=VPVWuKHI9EMs09ZLRYAGvR0Iz0mCMmEzXAkgJHovpoM,4020 +cryptography/hazmat/bindings/_rust/openssl/__init__.pyi,sha256=iOAMDyHoNwwCSZfZzuXDr64g4GpGUeDgEN-LjXqdrBM,1522 +cryptography/hazmat/bindings/_rust/openssl/aead.pyi,sha256=4Nddw6-ynzIB3w2W86WvkGKTLlTDk_6F5l54RHCuy3E,2688 +cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi,sha256=LhPzHWSXJq4grAJXn6zSvSSdV-aYIIscHDwIPlJGGPs,1315 +cryptography/hazmat/bindings/_rust/openssl/cmac.pyi,sha256=nPH0X57RYpsAkRowVpjQiHE566ThUTx7YXrsadmrmHk,564 +cryptography/hazmat/bindings/_rust/openssl/dh.pyi,sha256=Z3TC-G04-THtSdAOPLM1h2G7ml5bda1ElZUcn5wpuhk,1564 +cryptography/hazmat/bindings/_rust/openssl/dsa.pyi,sha256=qBtkgj2albt2qFcnZ9UDrhzoNhCVO7HTby5VSf1EXMI,1299 +cryptography/hazmat/bindings/_rust/openssl/ec.pyi,sha256=zJy0pRa5n-_p2dm45PxECB_-B6SVZyNKfjxFDpPqT38,1691 +cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi,sha256=VXfXd5G6hUivg399R1DYdmW3eTb0EebzDTqjRC2gaRw,532 +cryptography/hazmat/bindings/_rust/openssl/ed448.pyi,sha256=Yx49lqdnjsD7bxiDV1kcaMrDktug5evi5a6zerMiy2s,514 +cryptography/hazmat/bindings/_rust/openssl/hashes.pyi,sha256=OWZvBx7xfo_HJl41Nc--DugVyCVPIprZ3HlOPTSWH9g,984 +cryptography/hazmat/bindings/_rust/openssl/hmac.pyi,sha256=BXZn7NDjL3JAbYW0SQ8pg1iyC5DbQXVhUAiwsi8DFR8,702 +cryptography/hazmat/bindings/_rust/openssl/kdf.pyi,sha256=xXfFBb9QehHfDtEaxV_65Z0YK7NquOVIChpTLkgAs_k,2029 +cryptography/hazmat/bindings/_rust/openssl/keys.pyi,sha256=teIt8M6ZEMJrn4s3W0UnW0DZ-30Jd68WnSsKKG124l0,912 +cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi,sha256=_SW9NtQ5FDlAbdclFtWpT4lGmxKIKHpN-4j8J2BzYfQ,585 +cryptography/hazmat/bindings/_rust/openssl/rsa.pyi,sha256=2OQCNSXkxgc-3uw1xiCCloIQTV6p9_kK79Yu0rhZgPc,1364 +cryptography/hazmat/bindings/_rust/openssl/x25519.pyi,sha256=ewn4GpQyb7zPwE-ni7GtyQgMC0A1mLuqYsSyqv6nI_s,523 +cryptography/hazmat/bindings/_rust/openssl/x448.pyi,sha256=juTZTmli8jO_5Vcufg-vHvx_tCyezmSLIh_9PU3TczI,505 +cryptography/hazmat/bindings/_rust/pkcs12.pyi,sha256=vEEd5wDiZvb8ZGFaziLCaWLzAwoG_tvPUxLQw5_uOl8,1605 +cryptography/hazmat/bindings/_rust/pkcs7.pyi,sha256=txGBJijqZshEcqra6byPNbnisIdlxzOSIHP2hl9arPs,1601 +cryptography/hazmat/bindings/_rust/test_support.pyi,sha256=PPhld-WkO743iXFPebeG0LtgK0aTzGdjcIsay1Gm5GE,757 +cryptography/hazmat/bindings/_rust/x509.pyi,sha256=n9X0IQ6ICbdIi-ExdCFZoBgeY6njm3QOVAVZwDQdnbk,9784 +cryptography/hazmat/bindings/openssl/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc,, +cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc,, +cryptography/hazmat/bindings/openssl/_conditional.py,sha256=DMOpA_XN4l70zTc5_J9DpwlbQeUBRTWpfIJ4yRIn1-U,5791 +cryptography/hazmat/bindings/openssl/binding.py,sha256=x8eocEmukO4cm7cHqfVmOoYY7CCXdoF1v1WhZQt9neo,4610 +cryptography/hazmat/decrepit/__init__.py,sha256=wHCbWfaefa-fk6THSw9th9fJUsStJo7245wfFBqmduA,216 +cryptography/hazmat/decrepit/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/decrepit/ciphers/__init__.py,sha256=wHCbWfaefa-fk6THSw9th9fJUsStJo7245wfFBqmduA,216 +cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-312.pyc,, +cryptography/hazmat/decrepit/ciphers/algorithms.py,sha256=YrKgHS4MfwWaMmPBYRymRRlC0phwWp9ycICFezeJPGk,2595 
+cryptography/hazmat/primitives/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc,, +cryptography/hazmat/primitives/_asymmetric.py,sha256=RhgcouUB6HTiFDBrR1LxqkMjpUxIiNvQ1r_zJjRG6qQ,532 +cryptography/hazmat/primitives/_cipheralgorithm.py,sha256=Eh3i7lwedHfi0eLSsH93PZxQKzY9I6lkK67vL4V5tOc,1522 +cryptography/hazmat/primitives/_serialization.py,sha256=chgPCSF2jxI2Cr5gB-qbWXOvOfupBh4CARS0KAhv9AM,5123 +cryptography/hazmat/primitives/asymmetric/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/dh.py,sha256=0v_vEFFz5pQ1QG-FkWDyvgv7IfuVZSH5Q6LyFI5A8rg,3645 +cryptography/hazmat/primitives/asymmetric/dsa.py,sha256=Ld_bbbqQFz12dObHxIkzEQzX0SWWP41RLSWkYSaKhqE,4213 +cryptography/hazmat/primitives/asymmetric/ec.py,sha256=Vf5ig2PcS3PVnsb5N49Kx1uIkFBJyhg4BWXThDz5cug,12999 +cryptography/hazmat/primitives/asymmetric/ed25519.py,sha256=jZW5cs472wXXV3eB0sE1b8w64gdazwwU0_MT5UOTiXs,3700 +cryptography/hazmat/primitives/asymmetric/ed448.py,sha256=yAetgn2f2JYf0BO8MapGzXeThsvSMG5LmUCrxVOidAA,3729 +cryptography/hazmat/primitives/asymmetric/padding.py,sha256=vQ6l6gOg9HqcbOsvHrSiJRVLdEj9L4m4HkRGYziTyFA,2854 +cryptography/hazmat/primitives/asymmetric/rsa.py,sha256=ZnKOo2f34MCCOupC03Y1uR-_jiSG5IrelHEmxaME3D4,8303 +cryptography/hazmat/primitives/asymmetric/types.py,sha256=LnsOJym-wmPUJ7Knu_7bCNU3kIiELCd6krOaW_JU08I,2996 +cryptography/hazmat/primitives/asymmetric/utils.py,sha256=DPTs6T4F-UhwzFQTh-1fSEpQzazH2jf2xpIro3ItF4o,790 +cryptography/hazmat/primitives/asymmetric/x25519.py,sha256=_4nQeZ3yJ3Lg0RpXnaqA-1yt6vbx1F-wzLcaZHwSpeE,3613 +cryptography/hazmat/primitives/asymmetric/x448.py,sha256=WKBLtuVfJqiBRro654fGaQAlvsKbqbNkK7c4A_ZCdV0,3642 +cryptography/hazmat/primitives/ciphers/__init__.py,sha256=eyEXmjk6_CZXaOPYDr7vAYGXr29QvzgWL2-4CSolLFs,680 
+cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc,, +cryptography/hazmat/primitives/ciphers/aead.py,sha256=Fzlyx7w8KYQakzDp1zWgJnIr62zgZrgVh1u2h4exB54,634 +cryptography/hazmat/primitives/ciphers/algorithms.py,sha256=Q7ZJwcsx83Mgxv5y7r6CyJKSdsOwC-my-5A67-ma2vw,3407 +cryptography/hazmat/primitives/ciphers/base.py,sha256=aBC7HHBBoixebmparVr0UlODs3VD0A7B6oz_AaRjDv8,4253 +cryptography/hazmat/primitives/ciphers/modes.py,sha256=20stpwhDtbAvpH0SMf9EDHIciwmTF-JMBUOZ9bU8WiQ,8318 +cryptography/hazmat/primitives/cmac.py,sha256=sz_s6H_cYnOvx-VNWdIKhRhe3Ymp8z8J0D3CBqOX3gg,338 +cryptography/hazmat/primitives/constant_time.py,sha256=xdunWT0nf8OvKdcqUhhlFKayGp4_PgVJRU2W1wLSr_A,422 +cryptography/hazmat/primitives/hashes.py,sha256=M8BrlKB3U6DEtHvWTV5VRjpteHv1kS3Zxm_Bsk04cr8,5184 +cryptography/hazmat/primitives/hmac.py,sha256=RpB3z9z5skirCQrm7zQbtnp9pLMnAjrlTUvKqF5aDDc,423 +cryptography/hazmat/primitives/kdf/__init__.py,sha256=4XibZnrYq4hh5xBjWiIXzaYW6FKx8hPbVaa_cB9zS64,750 +cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/argon2.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/argon2.py,sha256=UFDNXG0v-rw3DqAQTB1UQAsQC2M5Ejg0k_6OCyhLKus,460 +cryptography/hazmat/primitives/kdf/concatkdf.py,sha256=Ua8KoLXXnzgsrAUmHpyKymaPt8aPRP0EHEaBz7QCQ9I,3737 +cryptography/hazmat/primitives/kdf/hkdf.py,sha256=M0lAEfRoc4kpp4-nwDj9yB-vNZukIOYEQrUlWsBNn9o,543 +cryptography/hazmat/primitives/kdf/kbkdf.py,sha256=oZepvo4evhKkkJQWRDwaPoIbyTaFmDc5NPimxg6lfKg,9165 +cryptography/hazmat/primitives/kdf/pbkdf2.py,sha256=1WIwhELR0w8ztTpTu8BrFiYWmK3hUfJq08I79TxwieE,1957 +cryptography/hazmat/primitives/kdf/scrypt.py,sha256=XyWUdUUmhuI9V6TqAPOvujCSMGv1XQdg0a21IWCmO-U,590 +cryptography/hazmat/primitives/kdf/x963kdf.py,sha256=zLTcF665QFvXX2f8TS7fmBZTteXpFjKahzfjjQcCJyw,1999 +cryptography/hazmat/primitives/keywrap.py,sha256=XV4Pj2fqSeD-RqZVvY2cA3j5_7RwJSFygYuLfk2ujCo,5650 +cryptography/hazmat/primitives/padding.py,sha256=QT-U-NvV2eQGO1wVPbDiNGNSc9keRDS-ig5cQOrLz0E,1865 +cryptography/hazmat/primitives/poly1305.py,sha256=P5EPQV-RB_FJPahpg01u0Ts4S_PnAmsroxIGXbGeRRo,355 +cryptography/hazmat/primitives/serialization/__init__.py,sha256=Q7uTgDlt7n3WfsMT6jYwutC6DIg_7SEeoAm1GHZ5B5E,1705 +cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc,, +cryptography/hazmat/primitives/serialization/base.py,sha256=ikq5MJIwp_oUnjiaBco_PmQwOTYuGi-XkYUYHKy8Vo0,615 
+cryptography/hazmat/primitives/serialization/pkcs12.py,sha256=mS9cFNG4afzvseoc5e1MWoY2VskfL8N8Y_OFjl67luY,5104 +cryptography/hazmat/primitives/serialization/pkcs7.py,sha256=5OR_Tkysxaprn4FegvJIfbep9rJ9wok6FLWvWwQ5-Mg,13943 +cryptography/hazmat/primitives/serialization/ssh.py,sha256=hPV5obFznz0QhFfXFPOeQ8y6MsurA0xVMQiLnLESEs8,53700 +cryptography/hazmat/primitives/twofactor/__init__.py,sha256=tmMZGB-g4IU1r7lIFqASU019zr0uPp_wEBYcwdDCKCA,258 +cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc,, +cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc,, +cryptography/hazmat/primitives/twofactor/hotp.py,sha256=ivZo5BrcCGWLsqql4nZV0XXCjyGPi_iHfDFltGlOJwk,3256 +cryptography/hazmat/primitives/twofactor/totp.py,sha256=m5LPpRL00kp4zY8gTjr55Hfz9aMlPS53kHmVkSQCmdY,1652 +cryptography/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +cryptography/utils.py,sha256=bZAjFC5KVpfmF29qS_18vvpW3mKxmdiRALcusHhTTkg,4301 +cryptography/x509/__init__.py,sha256=xloN0swseNx-m2WFZmCA17gOoxQWqeU82UVjEdJBePQ,8257 +cryptography/x509/__pycache__/__init__.cpython-312.pyc,, +cryptography/x509/__pycache__/base.cpython-312.pyc,, +cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc,, +cryptography/x509/__pycache__/extensions.cpython-312.pyc,, +cryptography/x509/__pycache__/general_name.cpython-312.pyc,, +cryptography/x509/__pycache__/name.cpython-312.pyc,, +cryptography/x509/__pycache__/ocsp.cpython-312.pyc,, +cryptography/x509/__pycache__/oid.cpython-312.pyc,, +cryptography/x509/__pycache__/verification.cpython-312.pyc,, +cryptography/x509/base.py,sha256=OrmTw3y8B6AE_nGXQPN8x9kq-d7rDWeH13gCq6T6D6U,27997 +cryptography/x509/certificate_transparency.py,sha256=JqoOIDhlwInrYMFW6IFn77WJ0viF-PB_rlZV3vs9MYc,797 +cryptography/x509/extensions.py,sha256=QxYrqR6SF1qzR9ZraP8wDiIczlEVlAFuwDRVcltB6Tk,77724 +cryptography/x509/general_name.py,sha256=sP_rV11Qlpsk4x3XXGJY_Mv0Q_s9dtjeLckHsjpLQoQ,7836 +cryptography/x509/name.py,sha256=ty0_xf0LnHwZAdEf-d8FLO1K4hGqx_7DsD3CHwoLJiY,15101 +cryptography/x509/ocsp.py,sha256=Yey6NdFV1MPjop24Mj_VenjEpg3kUaMopSWOK0AbeBs,12699 +cryptography/x509/oid.py,sha256=BUzgXXGVWilkBkdKPTm9R4qElE9gAGHgdYPMZAp7PJo,931 +cryptography/x509/verification.py,sha256=gR2C2c-XZQtblZhT5T5vjSKOtCb74ef2alPVmEcwFlM,958 diff --git a/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/WHEEL b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/WHEEL new file mode 100644 index 00000000..8e48aa14 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: maturin (1.9.4) +Root-Is-Purelib: false +Tag: cp311-abi3-manylinux_2_34_x86_64 + diff --git a/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE new file mode 100644 index 00000000..b11f379e --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE @@ -0,0 +1,3 @@ +This software is made available under the terms of *either* of the licenses +found in LICENSE.APACHE or LICENSE.BSD. Contributions to cryptography are made +under the terms of *both* these licenses. 
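Each line in the RECORD listing above pairs an installed path with `sha256=<digest>,<size>`. A short sketch of how such an entry is derived, following the wheel RECORD convention (PEP 376/427); `path` here stands for any installed file:

import base64
import hashlib


def record_entry(path: str) -> str:
    # Hash the file's bytes and note its size, as pip does at install time.
    with open(path, "rb") as f:
        data = f.read()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest())
    # RECORD uses urlsafe base64 with the trailing '=' padding stripped.
    return f"{path},sha256={digest.rstrip(b'=').decode('ascii')},{len(data)}"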
diff --git a/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE.APACHE b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE.APACHE new file mode 100644 index 00000000..62589edd --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE.APACHE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE.BSD b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE.BSD new file mode 100644 index 00000000..ec1a29d3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE.BSD @@ -0,0 +1,27 @@ +Copyright (c) Individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of PyCA Cryptography nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.12/site-packages/cryptography/__about__.py b/venv/lib/python3.12/site-packages/cryptography/__about__.py new file mode 100644 index 00000000..a8116284 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/__about__.py @@ -0,0 +1,17 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +__all__ = [ + "__author__", + "__copyright__", + "__version__", +] + +__version__ = "46.0.3" + + +__author__ = "The Python Cryptographic Authority and individual contributors" +__copyright__ = f"Copyright 2013-2025 {__author__}" diff --git a/venv/lib/python3.12/site-packages/cryptography/__init__.py b/venv/lib/python3.12/site-packages/cryptography/__init__.py new file mode 100644 index 00000000..d374f752 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/__init__.py @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.__about__ import __author__, __copyright__, __version__ + +__all__ = [ + "__author__", + "__copyright__", + "__version__", +] diff --git a/venv/lib/python3.12/site-packages/cryptography/__pycache__/__about__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/__pycache__/__about__.cpython-312.pyc new file mode 100644 index 00000000..cfa2b097 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/__pycache__/__about__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..b1bb4623 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/__pycache__/exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 00000000..7f5a5b92 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/__pycache__/exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/__pycache__/fernet.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/__pycache__/fernet.cpython-312.pyc new file mode 100644 index 00000000..faecdb1b Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/__pycache__/fernet.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/__pycache__/utils.cpython-312.pyc new file mode 100644 index 00000000..8aaa9e6c Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/exceptions.py b/venv/lib/python3.12/site-packages/cryptography/exceptions.py new file mode 100644 index 00000000..fe125ea9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/exceptions.py @@ -0,0 +1,52 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +import typing + +from cryptography.hazmat.bindings._rust import exceptions as rust_exceptions + +if typing.TYPE_CHECKING: + from cryptography.hazmat.bindings._rust import openssl as rust_openssl + +_Reasons = rust_exceptions._Reasons + + +class UnsupportedAlgorithm(Exception): + def __init__(self, message: str, reason: _Reasons | None = None) -> None: + super().__init__(message) + self._reason = reason + + +class AlreadyFinalized(Exception): + pass + + +class AlreadyUpdated(Exception): + pass + + +class NotYetFinalized(Exception): + pass + + +class InvalidTag(Exception): + pass + + +class InvalidSignature(Exception): + pass + + +class InternalError(Exception): + def __init__( + self, msg: str, err_code: list[rust_openssl.OpenSSLError] + ) -> None: + super().__init__(msg) + self.err_code = err_code + + +class InvalidKey(Exception): + pass diff --git a/venv/lib/python3.12/site-packages/cryptography/fernet.py b/venv/lib/python3.12/site-packages/cryptography/fernet.py new file mode 100644 index 00000000..c6744ae3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/fernet.py @@ -0,0 +1,224 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import base64 +import binascii +import os +import time +import typing +from collections.abc import Iterable + +from cryptography import utils +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.primitives import hashes, padding +from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes +from cryptography.hazmat.primitives.hmac import HMAC + + +class InvalidToken(Exception): + pass + + +_MAX_CLOCK_SKEW = 60 + + +class Fernet: + def __init__( + self, + key: bytes | str, + backend: typing.Any = None, + ) -> None: + try: + key = base64.urlsafe_b64decode(key) + except binascii.Error as exc: + raise ValueError( + "Fernet key must be 32 url-safe base64-encoded bytes." + ) from exc + if len(key) != 32: + raise ValueError( + "Fernet key must be 32 url-safe base64-encoded bytes." 
+ ) + + self._signing_key = key[:16] + self._encryption_key = key[16:] + + @classmethod + def generate_key(cls) -> bytes: + return base64.urlsafe_b64encode(os.urandom(32)) + + def encrypt(self, data: bytes) -> bytes: + return self.encrypt_at_time(data, int(time.time())) + + def encrypt_at_time(self, data: bytes, current_time: int) -> bytes: + iv = os.urandom(16) + return self._encrypt_from_parts(data, current_time, iv) + + def _encrypt_from_parts( + self, data: bytes, current_time: int, iv: bytes + ) -> bytes: + utils._check_bytes("data", data) + + padder = padding.PKCS7(algorithms.AES.block_size).padder() + padded_data = padder.update(data) + padder.finalize() + encryptor = Cipher( + algorithms.AES(self._encryption_key), + modes.CBC(iv), + ).encryptor() + ciphertext = encryptor.update(padded_data) + encryptor.finalize() + + basic_parts = ( + b"\x80" + + current_time.to_bytes(length=8, byteorder="big") + + iv + + ciphertext + ) + + h = HMAC(self._signing_key, hashes.SHA256()) + h.update(basic_parts) + hmac = h.finalize() + return base64.urlsafe_b64encode(basic_parts + hmac) + + def decrypt(self, token: bytes | str, ttl: int | None = None) -> bytes: + timestamp, data = Fernet._get_unverified_token_data(token) + if ttl is None: + time_info = None + else: + time_info = (ttl, int(time.time())) + return self._decrypt_data(data, timestamp, time_info) + + def decrypt_at_time( + self, token: bytes | str, ttl: int, current_time: int + ) -> bytes: + if ttl is None: + raise ValueError( + "decrypt_at_time() can only be used with a non-None ttl" + ) + timestamp, data = Fernet._get_unverified_token_data(token) + return self._decrypt_data(data, timestamp, (ttl, current_time)) + + def extract_timestamp(self, token: bytes | str) -> int: + timestamp, data = Fernet._get_unverified_token_data(token) + # Verify the token was not tampered with. 
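+        # Token layout, as assembled by _encrypt_from_parts above:
+        #   data[0]       version byte, always 0x80
+        #   data[1:9]     64-bit big-endian creation timestamp
+        #   data[9:25]    16-byte AES-CBC IV
+        #   data[25:-32]  ciphertext of the PKCS7-padded message
+        #   data[-32:]    HMAC-SHA256 over all preceding bytes
+        # The timestamp was parsed from unauthenticated bytes, so the HMAC
+        # must be checked before it is returned.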
+ self._verify_signature(data) + return timestamp + + @staticmethod + def _get_unverified_token_data(token: bytes | str) -> tuple[int, bytes]: + if not isinstance(token, (str, bytes)): + raise TypeError("token must be bytes or str") + + try: + data = base64.urlsafe_b64decode(token) + except (TypeError, binascii.Error): + raise InvalidToken + + if not data or data[0] != 0x80: + raise InvalidToken + + if len(data) < 9: + raise InvalidToken + + timestamp = int.from_bytes(data[1:9], byteorder="big") + return timestamp, data + + def _verify_signature(self, data: bytes) -> None: + h = HMAC(self._signing_key, hashes.SHA256()) + h.update(data[:-32]) + try: + h.verify(data[-32:]) + except InvalidSignature: + raise InvalidToken + + def _decrypt_data( + self, + data: bytes, + timestamp: int, + time_info: tuple[int, int] | None, + ) -> bytes: + if time_info is not None: + ttl, current_time = time_info + if timestamp + ttl < current_time: + raise InvalidToken + + if current_time + _MAX_CLOCK_SKEW < timestamp: + raise InvalidToken + + self._verify_signature(data) + + iv = data[9:25] + ciphertext = data[25:-32] + decryptor = Cipher( + algorithms.AES(self._encryption_key), modes.CBC(iv) + ).decryptor() + plaintext_padded = decryptor.update(ciphertext) + try: + plaintext_padded += decryptor.finalize() + except ValueError: + raise InvalidToken + unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder() + + unpadded = unpadder.update(plaintext_padded) + try: + unpadded += unpadder.finalize() + except ValueError: + raise InvalidToken + return unpadded + + +class MultiFernet: + def __init__(self, fernets: Iterable[Fernet]): + fernets = list(fernets) + if not fernets: + raise ValueError( + "MultiFernet requires at least one Fernet instance" + ) + self._fernets = fernets + + def encrypt(self, msg: bytes) -> bytes: + return self.encrypt_at_time(msg, int(time.time())) + + def encrypt_at_time(self, msg: bytes, current_time: int) -> bytes: + return self._fernets[0].encrypt_at_time(msg, current_time) + + def rotate(self, msg: bytes | str) -> bytes: + timestamp, data = Fernet._get_unverified_token_data(msg) + for f in self._fernets: + try: + p = f._decrypt_data(data, timestamp, None) + break + except InvalidToken: + pass + else: + raise InvalidToken + + iv = os.urandom(16) + return self._fernets[0]._encrypt_from_parts(p, timestamp, iv) + + def decrypt(self, msg: bytes | str, ttl: int | None = None) -> bytes: + for f in self._fernets: + try: + return f.decrypt(msg, ttl) + except InvalidToken: + pass + raise InvalidToken + + def decrypt_at_time( + self, msg: bytes | str, ttl: int, current_time: int + ) -> bytes: + for f in self._fernets: + try: + return f.decrypt_at_time(msg, ttl, current_time) + except InvalidToken: + pass + raise InvalidToken + + def extract_timestamp(self, msg: bytes | str) -> int: + for f in self._fernets: + try: + return f.extract_timestamp(msg) + except InvalidToken: + pass + raise InvalidToken diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/__init__.py new file mode 100644 index 00000000..b9f11870 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/__init__.py @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +""" +Hazardous Materials + +This is a "Hazardous Materials" module. 
You should ONLY use it if you're +100% absolutely sure that you know what you're doing because this module +is full of land mines, dragons, and dinosaurs with laser guns. +""" diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..2bfafaf3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-312.pyc new file mode 100644 index 00000000..7d15a689 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/_oid.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/_oid.py new file mode 100644 index 00000000..4bf138d4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/_oid.py @@ -0,0 +1,356 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import ( + ObjectIdentifier as ObjectIdentifier, +) +from cryptography.hazmat.primitives import hashes + + +class ExtensionOID: + SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9") + SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14") + KEY_USAGE = ObjectIdentifier("2.5.29.15") + PRIVATE_KEY_USAGE_PERIOD = ObjectIdentifier("2.5.29.16") + SUBJECT_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.17") + ISSUER_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.18") + BASIC_CONSTRAINTS = ObjectIdentifier("2.5.29.19") + NAME_CONSTRAINTS = ObjectIdentifier("2.5.29.30") + CRL_DISTRIBUTION_POINTS = ObjectIdentifier("2.5.29.31") + CERTIFICATE_POLICIES = ObjectIdentifier("2.5.29.32") + POLICY_MAPPINGS = ObjectIdentifier("2.5.29.33") + AUTHORITY_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.35") + POLICY_CONSTRAINTS = ObjectIdentifier("2.5.29.36") + EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37") + FRESHEST_CRL = ObjectIdentifier("2.5.29.46") + INHIBIT_ANY_POLICY = ObjectIdentifier("2.5.29.54") + ISSUING_DISTRIBUTION_POINT = ObjectIdentifier("2.5.29.28") + AUTHORITY_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.1") + SUBJECT_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.11") + OCSP_NO_CHECK = ObjectIdentifier("1.3.6.1.5.5.7.48.1.5") + TLS_FEATURE = ObjectIdentifier("1.3.6.1.5.5.7.1.24") + CRL_NUMBER = ObjectIdentifier("2.5.29.20") + DELTA_CRL_INDICATOR = ObjectIdentifier("2.5.29.27") + PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier( + "1.3.6.1.4.1.11129.2.4.2" + ) + PRECERT_POISON = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.3") + SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.5") + MS_CERTIFICATE_TEMPLATE = ObjectIdentifier("1.3.6.1.4.1.311.21.7") + ADMISSIONS = ObjectIdentifier("1.3.36.8.3.3") + + +class OCSPExtensionOID: + NONCE = ObjectIdentifier("1.3.6.1.5.5.7.48.1.2") + ACCEPTABLE_RESPONSES = ObjectIdentifier("1.3.6.1.5.5.7.48.1.4") + + +class CRLEntryExtensionOID: + CERTIFICATE_ISSUER = ObjectIdentifier("2.5.29.29") + CRL_REASON = ObjectIdentifier("2.5.29.21") + INVALIDITY_DATE = ObjectIdentifier("2.5.29.24") + + +class 
NameOID: + COMMON_NAME = ObjectIdentifier("2.5.4.3") + COUNTRY_NAME = ObjectIdentifier("2.5.4.6") + LOCALITY_NAME = ObjectIdentifier("2.5.4.7") + STATE_OR_PROVINCE_NAME = ObjectIdentifier("2.5.4.8") + STREET_ADDRESS = ObjectIdentifier("2.5.4.9") + ORGANIZATION_IDENTIFIER = ObjectIdentifier("2.5.4.97") + ORGANIZATION_NAME = ObjectIdentifier("2.5.4.10") + ORGANIZATIONAL_UNIT_NAME = ObjectIdentifier("2.5.4.11") + SERIAL_NUMBER = ObjectIdentifier("2.5.4.5") + SURNAME = ObjectIdentifier("2.5.4.4") + GIVEN_NAME = ObjectIdentifier("2.5.4.42") + TITLE = ObjectIdentifier("2.5.4.12") + INITIALS = ObjectIdentifier("2.5.4.43") + GENERATION_QUALIFIER = ObjectIdentifier("2.5.4.44") + X500_UNIQUE_IDENTIFIER = ObjectIdentifier("2.5.4.45") + DN_QUALIFIER = ObjectIdentifier("2.5.4.46") + PSEUDONYM = ObjectIdentifier("2.5.4.65") + USER_ID = ObjectIdentifier("0.9.2342.19200300.100.1.1") + DOMAIN_COMPONENT = ObjectIdentifier("0.9.2342.19200300.100.1.25") + EMAIL_ADDRESS = ObjectIdentifier("1.2.840.113549.1.9.1") + JURISDICTION_COUNTRY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.3") + JURISDICTION_LOCALITY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.1") + JURISDICTION_STATE_OR_PROVINCE_NAME = ObjectIdentifier( + "1.3.6.1.4.1.311.60.2.1.2" + ) + BUSINESS_CATEGORY = ObjectIdentifier("2.5.4.15") + POSTAL_ADDRESS = ObjectIdentifier("2.5.4.16") + POSTAL_CODE = ObjectIdentifier("2.5.4.17") + INN = ObjectIdentifier("1.2.643.3.131.1.1") + OGRN = ObjectIdentifier("1.2.643.100.1") + SNILS = ObjectIdentifier("1.2.643.100.3") + UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2") + + +class SignatureAlgorithmOID: + RSA_WITH_MD5 = ObjectIdentifier("1.2.840.113549.1.1.4") + RSA_WITH_SHA1 = ObjectIdentifier("1.2.840.113549.1.1.5") + # This is an alternate OID for RSA with SHA1 that is occasionally seen + _RSA_WITH_SHA1 = ObjectIdentifier("1.3.14.3.2.29") + RSA_WITH_SHA224 = ObjectIdentifier("1.2.840.113549.1.1.14") + RSA_WITH_SHA256 = ObjectIdentifier("1.2.840.113549.1.1.11") + RSA_WITH_SHA384 = ObjectIdentifier("1.2.840.113549.1.1.12") + RSA_WITH_SHA512 = ObjectIdentifier("1.2.840.113549.1.1.13") + RSA_WITH_SHA3_224 = ObjectIdentifier("2.16.840.1.101.3.4.3.13") + RSA_WITH_SHA3_256 = ObjectIdentifier("2.16.840.1.101.3.4.3.14") + RSA_WITH_SHA3_384 = ObjectIdentifier("2.16.840.1.101.3.4.3.15") + RSA_WITH_SHA3_512 = ObjectIdentifier("2.16.840.1.101.3.4.3.16") + RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10") + ECDSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10045.4.1") + ECDSA_WITH_SHA224 = ObjectIdentifier("1.2.840.10045.4.3.1") + ECDSA_WITH_SHA256 = ObjectIdentifier("1.2.840.10045.4.3.2") + ECDSA_WITH_SHA384 = ObjectIdentifier("1.2.840.10045.4.3.3") + ECDSA_WITH_SHA512 = ObjectIdentifier("1.2.840.10045.4.3.4") + ECDSA_WITH_SHA3_224 = ObjectIdentifier("2.16.840.1.101.3.4.3.9") + ECDSA_WITH_SHA3_256 = ObjectIdentifier("2.16.840.1.101.3.4.3.10") + ECDSA_WITH_SHA3_384 = ObjectIdentifier("2.16.840.1.101.3.4.3.11") + ECDSA_WITH_SHA3_512 = ObjectIdentifier("2.16.840.1.101.3.4.3.12") + DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3") + DSA_WITH_SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.3.1") + DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2") + DSA_WITH_SHA384 = ObjectIdentifier("2.16.840.1.101.3.4.3.3") + DSA_WITH_SHA512 = ObjectIdentifier("2.16.840.1.101.3.4.3.4") + ED25519 = ObjectIdentifier("1.3.101.112") + ED448 = ObjectIdentifier("1.3.101.113") + GOSTR3411_94_WITH_3410_2001 = ObjectIdentifier("1.2.643.2.2.3") + GOSTR3410_2012_WITH_3411_2012_256 = ObjectIdentifier("1.2.643.7.1.1.3.2") + 
GOSTR3410_2012_WITH_3411_2012_512 = ObjectIdentifier("1.2.643.7.1.1.3.3") + + +_SIG_OIDS_TO_HASH: dict[ObjectIdentifier, hashes.HashAlgorithm | None] = { + SignatureAlgorithmOID.RSA_WITH_MD5: hashes.MD5(), + SignatureAlgorithmOID.RSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID._RSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID.RSA_WITH_SHA224: hashes.SHA224(), + SignatureAlgorithmOID.RSA_WITH_SHA256: hashes.SHA256(), + SignatureAlgorithmOID.RSA_WITH_SHA384: hashes.SHA384(), + SignatureAlgorithmOID.RSA_WITH_SHA512: hashes.SHA512(), + SignatureAlgorithmOID.RSA_WITH_SHA3_224: hashes.SHA3_224(), + SignatureAlgorithmOID.RSA_WITH_SHA3_256: hashes.SHA3_256(), + SignatureAlgorithmOID.RSA_WITH_SHA3_384: hashes.SHA3_384(), + SignatureAlgorithmOID.RSA_WITH_SHA3_512: hashes.SHA3_512(), + SignatureAlgorithmOID.ECDSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID.ECDSA_WITH_SHA224: hashes.SHA224(), + SignatureAlgorithmOID.ECDSA_WITH_SHA256: hashes.SHA256(), + SignatureAlgorithmOID.ECDSA_WITH_SHA384: hashes.SHA384(), + SignatureAlgorithmOID.ECDSA_WITH_SHA512: hashes.SHA512(), + SignatureAlgorithmOID.ECDSA_WITH_SHA3_224: hashes.SHA3_224(), + SignatureAlgorithmOID.ECDSA_WITH_SHA3_256: hashes.SHA3_256(), + SignatureAlgorithmOID.ECDSA_WITH_SHA3_384: hashes.SHA3_384(), + SignatureAlgorithmOID.ECDSA_WITH_SHA3_512: hashes.SHA3_512(), + SignatureAlgorithmOID.DSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID.DSA_WITH_SHA224: hashes.SHA224(), + SignatureAlgorithmOID.DSA_WITH_SHA256: hashes.SHA256(), + SignatureAlgorithmOID.ED25519: None, + SignatureAlgorithmOID.ED448: None, + SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: None, + SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: None, + SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: None, +} + + +class HashAlgorithmOID: + SHA1 = ObjectIdentifier("1.3.14.3.2.26") + SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.2.4") + SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.2.1") + SHA384 = ObjectIdentifier("2.16.840.1.101.3.4.2.2") + SHA512 = ObjectIdentifier("2.16.840.1.101.3.4.2.3") + SHA3_224 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.224") + SHA3_256 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.256") + SHA3_384 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.384") + SHA3_512 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.512") + SHA3_224_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.7") + SHA3_256_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.8") + SHA3_384_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.9") + SHA3_512_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.10") + + +class PublicKeyAlgorithmOID: + DSA = ObjectIdentifier("1.2.840.10040.4.1") + EC_PUBLIC_KEY = ObjectIdentifier("1.2.840.10045.2.1") + RSAES_PKCS1_v1_5 = ObjectIdentifier("1.2.840.113549.1.1.1") + RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10") + X25519 = ObjectIdentifier("1.3.101.110") + X448 = ObjectIdentifier("1.3.101.111") + ED25519 = ObjectIdentifier("1.3.101.112") + ED448 = ObjectIdentifier("1.3.101.113") + + +class ExtendedKeyUsageOID: + SERVER_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.1") + CLIENT_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.2") + CODE_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.3") + EMAIL_PROTECTION = ObjectIdentifier("1.3.6.1.5.5.7.3.4") + TIME_STAMPING = ObjectIdentifier("1.3.6.1.5.5.7.3.8") + OCSP_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.9") + ANY_EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37.0") + SMARTCARD_LOGON = ObjectIdentifier("1.3.6.1.4.1.311.20.2.2") + KERBEROS_PKINIT_KDC = ObjectIdentifier("1.3.6.1.5.2.3.5") 
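+    # Sketch of how these EKU constants are typically consumed (`cert` is
+    # an assumed x509.Certificate instance, not something this module
+    # provides):
+    #
+    #     ext = cert.extensions.get_extension_for_oid(
+    #         ExtensionOID.EXTENDED_KEY_USAGE
+    #     )
+    #     is_server_cert = ExtendedKeyUsageOID.SERVER_AUTH in ext.value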
+ IPSEC_IKE = ObjectIdentifier("1.3.6.1.5.5.7.3.17") + BUNDLE_SECURITY = ObjectIdentifier("1.3.6.1.5.5.7.3.35") + CERTIFICATE_TRANSPARENCY = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.4") + + +class OtherNameFormOID: + PERMANENT_IDENTIFIER = ObjectIdentifier("1.3.6.1.5.5.7.8.3") + HW_MODULE_NAME = ObjectIdentifier("1.3.6.1.5.5.7.8.4") + DNS_SRV = ObjectIdentifier("1.3.6.1.5.5.7.8.7") + NAI_REALM = ObjectIdentifier("1.3.6.1.5.5.7.8.8") + SMTP_UTF8_MAILBOX = ObjectIdentifier("1.3.6.1.5.5.7.8.9") + ACP_NODE_NAME = ObjectIdentifier("1.3.6.1.5.5.7.8.10") + BUNDLE_EID = ObjectIdentifier("1.3.6.1.5.5.7.8.11") + + +class AuthorityInformationAccessOID: + CA_ISSUERS = ObjectIdentifier("1.3.6.1.5.5.7.48.2") + OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1") + + +class SubjectInformationAccessOID: + CA_REPOSITORY = ObjectIdentifier("1.3.6.1.5.5.7.48.5") + + +class CertificatePoliciesOID: + CPS_QUALIFIER = ObjectIdentifier("1.3.6.1.5.5.7.2.1") + CPS_USER_NOTICE = ObjectIdentifier("1.3.6.1.5.5.7.2.2") + ANY_POLICY = ObjectIdentifier("2.5.29.32.0") + + +class AttributeOID: + CHALLENGE_PASSWORD = ObjectIdentifier("1.2.840.113549.1.9.7") + UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2") + + +_OID_NAMES = { + NameOID.COMMON_NAME: "commonName", + NameOID.COUNTRY_NAME: "countryName", + NameOID.LOCALITY_NAME: "localityName", + NameOID.STATE_OR_PROVINCE_NAME: "stateOrProvinceName", + NameOID.STREET_ADDRESS: "streetAddress", + NameOID.ORGANIZATION_NAME: "organizationName", + NameOID.ORGANIZATIONAL_UNIT_NAME: "organizationalUnitName", + NameOID.SERIAL_NUMBER: "serialNumber", + NameOID.SURNAME: "surname", + NameOID.GIVEN_NAME: "givenName", + NameOID.TITLE: "title", + NameOID.GENERATION_QUALIFIER: "generationQualifier", + NameOID.X500_UNIQUE_IDENTIFIER: "x500UniqueIdentifier", + NameOID.DN_QUALIFIER: "dnQualifier", + NameOID.PSEUDONYM: "pseudonym", + NameOID.USER_ID: "userID", + NameOID.DOMAIN_COMPONENT: "domainComponent", + NameOID.EMAIL_ADDRESS: "emailAddress", + NameOID.JURISDICTION_COUNTRY_NAME: "jurisdictionCountryName", + NameOID.JURISDICTION_LOCALITY_NAME: "jurisdictionLocalityName", + NameOID.JURISDICTION_STATE_OR_PROVINCE_NAME: ( + "jurisdictionStateOrProvinceName" + ), + NameOID.BUSINESS_CATEGORY: "businessCategory", + NameOID.POSTAL_ADDRESS: "postalAddress", + NameOID.POSTAL_CODE: "postalCode", + NameOID.INN: "INN", + NameOID.OGRN: "OGRN", + NameOID.SNILS: "SNILS", + NameOID.UNSTRUCTURED_NAME: "unstructuredName", + SignatureAlgorithmOID.RSA_WITH_MD5: "md5WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA1: "sha1WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA224: "sha224WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA256: "sha256WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA384: "sha384WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA512: "sha512WithRSAEncryption", + SignatureAlgorithmOID.RSASSA_PSS: "rsassaPss", + SignatureAlgorithmOID.ECDSA_WITH_SHA1: "ecdsa-with-SHA1", + SignatureAlgorithmOID.ECDSA_WITH_SHA224: "ecdsa-with-SHA224", + SignatureAlgorithmOID.ECDSA_WITH_SHA256: "ecdsa-with-SHA256", + SignatureAlgorithmOID.ECDSA_WITH_SHA384: "ecdsa-with-SHA384", + SignatureAlgorithmOID.ECDSA_WITH_SHA512: "ecdsa-with-SHA512", + SignatureAlgorithmOID.DSA_WITH_SHA1: "dsa-with-sha1", + SignatureAlgorithmOID.DSA_WITH_SHA224: "dsa-with-sha224", + SignatureAlgorithmOID.DSA_WITH_SHA256: "dsa-with-sha256", + SignatureAlgorithmOID.ED25519: "ed25519", + SignatureAlgorithmOID.ED448: "ed448", + SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: ( + "GOST R 34.11-94 with 
GOST R 34.10-2001" + ), + SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: ( + "GOST R 34.10-2012 with GOST R 34.11-2012 (256 bit)" + ), + SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: ( + "GOST R 34.10-2012 with GOST R 34.11-2012 (512 bit)" + ), + HashAlgorithmOID.SHA1: "sha1", + HashAlgorithmOID.SHA224: "sha224", + HashAlgorithmOID.SHA256: "sha256", + HashAlgorithmOID.SHA384: "sha384", + HashAlgorithmOID.SHA512: "sha512", + HashAlgorithmOID.SHA3_224: "sha3_224", + HashAlgorithmOID.SHA3_256: "sha3_256", + HashAlgorithmOID.SHA3_384: "sha3_384", + HashAlgorithmOID.SHA3_512: "sha3_512", + HashAlgorithmOID.SHA3_224_NIST: "sha3_224", + HashAlgorithmOID.SHA3_256_NIST: "sha3_256", + HashAlgorithmOID.SHA3_384_NIST: "sha3_384", + HashAlgorithmOID.SHA3_512_NIST: "sha3_512", + PublicKeyAlgorithmOID.DSA: "dsaEncryption", + PublicKeyAlgorithmOID.EC_PUBLIC_KEY: "id-ecPublicKey", + PublicKeyAlgorithmOID.RSAES_PKCS1_v1_5: "rsaEncryption", + PublicKeyAlgorithmOID.X25519: "X25519", + PublicKeyAlgorithmOID.X448: "X448", + ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth", + ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth", + ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning", + ExtendedKeyUsageOID.EMAIL_PROTECTION: "emailProtection", + ExtendedKeyUsageOID.TIME_STAMPING: "timeStamping", + ExtendedKeyUsageOID.OCSP_SIGNING: "OCSPSigning", + ExtendedKeyUsageOID.SMARTCARD_LOGON: "msSmartcardLogin", + ExtendedKeyUsageOID.KERBEROS_PKINIT_KDC: "pkInitKDC", + ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES: "subjectDirectoryAttributes", + ExtensionOID.SUBJECT_KEY_IDENTIFIER: "subjectKeyIdentifier", + ExtensionOID.KEY_USAGE: "keyUsage", + ExtensionOID.PRIVATE_KEY_USAGE_PERIOD: "privateKeyUsagePeriod", + ExtensionOID.SUBJECT_ALTERNATIVE_NAME: "subjectAltName", + ExtensionOID.ISSUER_ALTERNATIVE_NAME: "issuerAltName", + ExtensionOID.BASIC_CONSTRAINTS: "basicConstraints", + ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: ( + "signedCertificateTimestampList" + ), + ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS: ( + "signedCertificateTimestampList" + ), + ExtensionOID.PRECERT_POISON: "ctPoison", + ExtensionOID.MS_CERTIFICATE_TEMPLATE: "msCertificateTemplate", + ExtensionOID.ADMISSIONS: "Admissions", + CRLEntryExtensionOID.CRL_REASON: "cRLReason", + CRLEntryExtensionOID.INVALIDITY_DATE: "invalidityDate", + CRLEntryExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer", + ExtensionOID.NAME_CONSTRAINTS: "nameConstraints", + ExtensionOID.CRL_DISTRIBUTION_POINTS: "cRLDistributionPoints", + ExtensionOID.CERTIFICATE_POLICIES: "certificatePolicies", + ExtensionOID.POLICY_MAPPINGS: "policyMappings", + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: "authorityKeyIdentifier", + ExtensionOID.POLICY_CONSTRAINTS: "policyConstraints", + ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage", + ExtensionOID.FRESHEST_CRL: "freshestCRL", + ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy", + ExtensionOID.ISSUING_DISTRIBUTION_POINT: "issuingDistributionPoint", + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess", + ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess", + ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck", + ExtensionOID.CRL_NUMBER: "cRLNumber", + ExtensionOID.DELTA_CRL_INDICATOR: "deltaCRLIndicator", + ExtensionOID.TLS_FEATURE: "TLSFeature", + AuthorityInformationAccessOID.OCSP: "OCSP", + AuthorityInformationAccessOID.CA_ISSUERS: "caIssuers", + SubjectInformationAccessOID.CA_REPOSITORY: "caRepository", + CertificatePoliciesOID.CPS_QUALIFIER: "id-qt-cps", + CertificatePoliciesOID.CPS_USER_NOTICE: "id-qt-unotice", + 
OCSPExtensionOID.NONCE: "OCSPNonce", + AttributeOID.CHALLENGE_PASSWORD: "challengePassword", +} diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/__init__.py new file mode 100644 index 00000000..be683736 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/__init__.py @@ -0,0 +1,10 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.asn1.asn1 import encode_der, sequence + +__all__ = [ + "encode_der", + "sequence", +] diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..fd67ac28 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/__pycache__/asn1.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/__pycache__/asn1.cpython-312.pyc new file mode 100644 index 00000000..7b05ba78 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/__pycache__/asn1.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/asn1.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/asn1.py new file mode 100644 index 00000000..dedad6f2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/asn1.py @@ -0,0 +1,116 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import dataclasses +import sys +import typing + +if sys.version_info < (3, 11): + import typing_extensions + + # We use the `include_extras` parameter of `get_type_hints`, which was + # added in Python 3.9. This can be replaced by the `typing` version + # once the min version is >= 3.9 + if sys.version_info < (3, 9): + get_type_hints = typing_extensions.get_type_hints + else: + get_type_hints = typing.get_type_hints +else: + get_type_hints = typing.get_type_hints + +from cryptography.hazmat.bindings._rust import declarative_asn1 + +T = typing.TypeVar("T", covariant=True) +U = typing.TypeVar("U") + + +encode_der = declarative_asn1.encode_der + + +def _normalize_field_type( + field_type: typing.Any, field_name: str +) -> declarative_asn1.AnnotatedType: + annotation = declarative_asn1.Annotation() + + if hasattr(field_type, "__asn1_root__"): + annotated_root = field_type.__asn1_root__ + if not isinstance(annotated_root, declarative_asn1.AnnotatedType): + raise TypeError(f"unsupported root type: {annotated_root}") + return annotated_root + else: + rust_field_type = declarative_asn1.non_root_python_to_rust(field_type) + + return declarative_asn1.AnnotatedType(rust_field_type, annotation) + + +def _annotate_fields( + raw_fields: dict[str, type], +) -> dict[str, declarative_asn1.AnnotatedType]: + fields = {} + for field_name, field_type in raw_fields.items(): + # Recursively normalize the field type into something that the + # Rust code can understand. 
+ annotated_field_type = _normalize_field_type(field_type, field_name) + fields[field_name] = annotated_field_type + + return fields + + +def _register_asn1_sequence(cls: type[U]) -> None: + raw_fields = get_type_hints(cls, include_extras=True) + root = declarative_asn1.AnnotatedType( + declarative_asn1.Type.Sequence(cls, _annotate_fields(raw_fields)), + declarative_asn1.Annotation(), + ) + + setattr(cls, "__asn1_root__", root) + + +# Due to https://github.com/python/mypy/issues/19731, we can't define an alias +# for `dataclass_transform` that conditionally points to `typing` or +# `typing_extensions` depending on the Python version (like we do for +# `get_type_hints`). +# We work around it by making the whole decorated class conditional on the +# Python version. +if sys.version_info < (3, 11): + + @typing_extensions.dataclass_transform(kw_only_default=True) + def sequence(cls: type[U]) -> type[U]: + # We use `dataclasses.dataclass` to add an __init__ method + # to the class with keyword-only parameters. + if sys.version_info >= (3, 10): + dataclass_cls = dataclasses.dataclass( + repr=False, + eq=False, + # `match_args` was added in Python 3.10 and defaults + # to True + match_args=False, + # `kw_only` was added in Python 3.10 and defaults to + # False + kw_only=True, + )(cls) + else: + dataclass_cls = dataclasses.dataclass( + repr=False, + eq=False, + )(cls) + _register_asn1_sequence(dataclass_cls) + return dataclass_cls + +else: + + @typing.dataclass_transform(kw_only_default=True) + def sequence(cls: type[U]) -> type[U]: + # Only add an __init__ method, with keyword-only + # parameters. + dataclass_cls = dataclasses.dataclass( + repr=False, + eq=False, + match_args=False, + kw_only=True, + )(cls) + _register_asn1_sequence(dataclass_cls) + return dataclass_cls diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/__init__.py new file mode 100644 index 00000000..b4400aa0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/__init__.py @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from typing import Any + + +def default_backend() -> Any: + from cryptography.hazmat.backends.openssl.backend import backend + + return backend diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..fed1884d Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__init__.py new file mode 100644 index 00000000..51b04476 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__init__.py @@ -0,0 +1,9 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +from cryptography.hazmat.backends.openssl.backend import backend + +__all__ = ["backend"] diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..2fa9ab62 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc new file mode 100644 index 00000000..b72dae93 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/backend.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/backend.py new file mode 100644 index 00000000..248b8c52 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/backend.py @@ -0,0 +1,302 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.bindings.openssl import binding +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives._asymmetric import AsymmetricPadding +from cryptography.hazmat.primitives.asymmetric import ec +from cryptography.hazmat.primitives.asymmetric import utils as asym_utils +from cryptography.hazmat.primitives.asymmetric.padding import ( + MGF1, + OAEP, + PSS, + PKCS1v15, +) +from cryptography.hazmat.primitives.ciphers import ( + CipherAlgorithm, +) +from cryptography.hazmat.primitives.ciphers.algorithms import ( + AES, +) +from cryptography.hazmat.primitives.ciphers.modes import ( + CBC, + Mode, +) + + +class Backend: + """ + OpenSSL API binding interfaces. + """ + + name = "openssl" + + # TripleDES encryption is disallowed/deprecated throughout 2023 in + # FIPS 140-3. To keep it simple we denylist any use of TripleDES (TDEA). + _fips_ciphers = (AES,) + # Sometimes SHA1 is still permissible. That logic is contained + # within the various *_supported methods. 
+    _fips_hashes = (
+        hashes.SHA224,
+        hashes.SHA256,
+        hashes.SHA384,
+        hashes.SHA512,
+        hashes.SHA512_224,
+        hashes.SHA512_256,
+        hashes.SHA3_224,
+        hashes.SHA3_256,
+        hashes.SHA3_384,
+        hashes.SHA3_512,
+        hashes.SHAKE128,
+        hashes.SHAKE256,
+    )
+    _fips_ecdh_curves = (
+        ec.SECP224R1,
+        ec.SECP256R1,
+        ec.SECP384R1,
+        ec.SECP521R1,
+    )
+    _fips_rsa_min_key_size = 2048
+    _fips_rsa_min_public_exponent = 65537
+    _fips_dsa_min_modulus = 1 << 2048
+    _fips_dh_min_key_size = 2048
+    _fips_dh_min_modulus = 1 << _fips_dh_min_key_size
+
+    def __init__(self) -> None:
+        self._binding = binding.Binding()
+        self._ffi = self._binding.ffi
+        self._lib = self._binding.lib
+        self._fips_enabled = rust_openssl.is_fips_enabled()
+
+    def __repr__(self) -> str:
+        return (
+            f"<OpenSSLBackend(version: {self.openssl_version_text()}, "
+            f"FIPS: {self._fips_enabled}, "
+            f"Legacy: {rust_openssl._legacy_provider_loaded})>"
+        )
+
+    def openssl_assert(self, ok: bool) -> None:
+        return binding._openssl_assert(ok)
+
+    def _enable_fips(self) -> None:
+        # This function enables FIPS mode for OpenSSL 3.0.0 on installs that
+        # have the FIPS provider installed properly.
+        rust_openssl.enable_fips(rust_openssl._providers)
+        assert rust_openssl.is_fips_enabled()
+        self._fips_enabled = rust_openssl.is_fips_enabled()
+
+    def openssl_version_text(self) -> str:
+        """
+        Friendly string name of the loaded OpenSSL library. This is not
+        necessarily the same version as it was compiled against.
+
+        Example: OpenSSL 3.2.1 30 Jan 2024
+        """
+        return rust_openssl.openssl_version_text()
+
+    def openssl_version_number(self) -> int:
+        return rust_openssl.openssl_version()
+
+    def hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool:
+        if self._fips_enabled and not isinstance(algorithm, self._fips_hashes):
+            return False
+
+        return rust_openssl.hashes.hash_supported(algorithm)
+
+    def signature_hash_supported(
+        self, algorithm: hashes.HashAlgorithm
+    ) -> bool:
+        # Dedicated check for hashing algorithm use in message digest for
+        # signatures, e.g. RSA PKCS#1 v1.5 SHA1 (sha1WithRSAEncryption).
+        if self._fips_enabled and isinstance(algorithm, hashes.SHA1):
+            return False
+        return self.hash_supported(algorithm)
+
+    def scrypt_supported(self) -> bool:
+        if self._fips_enabled:
+            return False
+        else:
+            return hasattr(rust_openssl.kdf.Scrypt, "derive")
+
+    def argon2_supported(self) -> bool:
+        if self._fips_enabled:
+            return False
+        else:
+            return hasattr(rust_openssl.kdf.Argon2id, "derive")
+
+    def hmac_supported(self, algorithm: hashes.HashAlgorithm) -> bool:
+        # FIPS mode still allows SHA1 for HMAC
+        if self._fips_enabled and isinstance(algorithm, hashes.SHA1):
+            return True
+        if rust_openssl.CRYPTOGRAPHY_IS_AWSLC:
+            return isinstance(
+                algorithm,
+                (
+                    hashes.SHA1,
+                    hashes.SHA224,
+                    hashes.SHA256,
+                    hashes.SHA384,
+                    hashes.SHA512,
+                    hashes.SHA512_224,
+                    hashes.SHA512_256,
+                ),
+            )
+        return self.hash_supported(algorithm)
+
+    def cipher_supported(self, cipher: CipherAlgorithm, mode: Mode) -> bool:
+        if self._fips_enabled:
+            # FIPS mode requires AES. TripleDES is disallowed/deprecated in
+            # FIPS 140-3.
+ if not isinstance(cipher, self._fips_ciphers): + return False + + return rust_openssl.ciphers.cipher_supported(cipher, mode) + + def pbkdf2_hmac_supported(self, algorithm: hashes.HashAlgorithm) -> bool: + return self.hmac_supported(algorithm) + + def _consume_errors(self) -> list[rust_openssl.OpenSSLError]: + return rust_openssl.capture_error_stack() + + def _oaep_hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool: + if self._fips_enabled and isinstance(algorithm, hashes.SHA1): + return False + + return isinstance( + algorithm, + ( + hashes.SHA1, + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, + ), + ) + + def rsa_padding_supported(self, padding: AsymmetricPadding) -> bool: + if isinstance(padding, PKCS1v15): + return True + elif isinstance(padding, PSS) and isinstance(padding._mgf, MGF1): + # FIPS 186-4 only allows salt length == digest length for PSS + # It is technically acceptable to set an explicit salt length + # equal to the digest length and this will incorrectly fail, but + # since we don't do that in the tests and this method is + # private, we'll ignore that until we need to do otherwise. + if ( + self._fips_enabled + and padding._salt_length != PSS.DIGEST_LENGTH + ): + return False + return self.hash_supported(padding._mgf._algorithm) + elif isinstance(padding, OAEP) and isinstance(padding._mgf, MGF1): + return self._oaep_hash_supported( + padding._mgf._algorithm + ) and self._oaep_hash_supported(padding._algorithm) + else: + return False + + def rsa_encryption_supported(self, padding: AsymmetricPadding) -> bool: + if self._fips_enabled and isinstance(padding, PKCS1v15): + return False + else: + return self.rsa_padding_supported(padding) + + def dsa_supported(self) -> bool: + return ( + not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL + and not self._fips_enabled + ) + + def dsa_hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool: + if not self.dsa_supported(): + return False + return self.signature_hash_supported(algorithm) + + def cmac_algorithm_supported(self, algorithm) -> bool: + return self.cipher_supported( + algorithm, CBC(b"\x00" * algorithm.block_size) + ) + + def elliptic_curve_supported(self, curve: ec.EllipticCurve) -> bool: + if self._fips_enabled and not isinstance( + curve, self._fips_ecdh_curves + ): + return False + + return rust_openssl.ec.curve_supported(curve) + + def elliptic_curve_signature_algorithm_supported( + self, + signature_algorithm: ec.EllipticCurveSignatureAlgorithm, + curve: ec.EllipticCurve, + ) -> bool: + # We only support ECDSA right now. 
+ if not isinstance(signature_algorithm, ec.ECDSA): + return False + + return self.elliptic_curve_supported(curve) and ( + isinstance(signature_algorithm.algorithm, asym_utils.Prehashed) + or self.hash_supported(signature_algorithm.algorithm) + ) + + def elliptic_curve_exchange_algorithm_supported( + self, algorithm: ec.ECDH, curve: ec.EllipticCurve + ) -> bool: + return self.elliptic_curve_supported(curve) and isinstance( + algorithm, ec.ECDH + ) + + def dh_supported(self) -> bool: + return ( + not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL + and not rust_openssl.CRYPTOGRAPHY_IS_AWSLC + ) + + def dh_x942_serialization_supported(self) -> bool: + return self._lib.Cryptography_HAS_EVP_PKEY_DHX == 1 + + def x25519_supported(self) -> bool: + return not self._fips_enabled + + def x448_supported(self) -> bool: + if self._fips_enabled: + return False + return ( + not rust_openssl.CRYPTOGRAPHY_IS_LIBRESSL + and not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL + and not rust_openssl.CRYPTOGRAPHY_IS_AWSLC + ) + + def ed25519_supported(self) -> bool: + return not self._fips_enabled + + def ed448_supported(self) -> bool: + if self._fips_enabled: + return False + return ( + not rust_openssl.CRYPTOGRAPHY_IS_LIBRESSL + and not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL + and not rust_openssl.CRYPTOGRAPHY_IS_AWSLC + ) + + def ecdsa_deterministic_supported(self) -> bool: + return ( + rust_openssl.CRYPTOGRAPHY_OPENSSL_320_OR_GREATER + and not self._fips_enabled + ) + + def poly1305_supported(self) -> bool: + return not self._fips_enabled + + def pkcs7_supported(self) -> bool: + return ( + not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL + and not rust_openssl.CRYPTOGRAPHY_IS_AWSLC + ) + + +backend = Backend() diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__init__.py new file mode 100644 index 00000000..b5093362 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__init__.py @@ -0,0 +1,3 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..d842972b Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust.abi3.so b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust.abi3.so new file mode 100755 index 00000000..f0883977 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust.abi3.so differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi new file mode 100644 index 00000000..2f4eef4e --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi @@ -0,0 +1,37 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +import typing + +from cryptography.hazmat.primitives import padding +from cryptography.utils import Buffer + +class PKCS7PaddingContext(padding.PaddingContext): + def __init__(self, block_size: int) -> None: ... + def update(self, data: Buffer) -> bytes: ... + def finalize(self) -> bytes: ... + +class ANSIX923PaddingContext(padding.PaddingContext): + def __init__(self, block_size: int) -> None: ... + def update(self, data: Buffer) -> bytes: ... + def finalize(self) -> bytes: ... + +class PKCS7UnpaddingContext(padding.PaddingContext): + def __init__(self, block_size: int) -> None: ... + def update(self, data: Buffer) -> bytes: ... + def finalize(self) -> bytes: ... + +class ANSIX923UnpaddingContext(padding.PaddingContext): + def __init__(self, block_size: int) -> None: ... + def update(self, data: Buffer) -> bytes: ... + def finalize(self) -> bytes: ... + +class ObjectIdentifier: + def __init__(self, value: str) -> None: ... + @property + def dotted_string(self) -> str: ... + @property + def _name(self) -> str: ... + +T = typing.TypeVar("T") diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi new file mode 100644 index 00000000..80100082 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi @@ -0,0 +1,8 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +lib = typing.Any +ffi = typing.Any diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi new file mode 100644 index 00000000..3b5f208e --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi @@ -0,0 +1,7 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +def decode_dss_signature(signature: bytes) -> tuple[int, int]: ... +def encode_dss_signature(r: int, s: int) -> bytes: ... +def parse_spki_for_data(data: bytes) -> bytes: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/declarative_asn1.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/declarative_asn1.pyi new file mode 100644 index 00000000..8563c11f --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/declarative_asn1.pyi @@ -0,0 +1,32 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +import typing + +def encode_der(value: typing.Any) -> bytes: ... +def non_root_python_to_rust(cls: type) -> Type: ... + +# Type is a Rust enum with tuple variants. For now, we express the type +# annotations like this: +class Type: + Sequence: typing.ClassVar[type] + PyInt: typing.ClassVar[type] + +class Annotation: + def __new__( + cls, + ) -> Annotation: ... + +class AnnotatedType: + inner: Type + annotation: Annotation + + def __new__(cls, inner: Type, annotation: Annotation) -> AnnotatedType: ... 
+ +class AnnotatedTypeObject: + annotated_type: AnnotatedType + value: typing.Any + + def __new__( + cls, annotated_type: AnnotatedType, value: typing.Any + ) -> AnnotatedTypeObject: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi new file mode 100644 index 00000000..09f46b1e --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi @@ -0,0 +1,17 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +class _Reasons: + BACKEND_MISSING_INTERFACE: _Reasons + UNSUPPORTED_HASH: _Reasons + UNSUPPORTED_CIPHER: _Reasons + UNSUPPORTED_PADDING: _Reasons + UNSUPPORTED_MGF: _Reasons + UNSUPPORTED_PUBLIC_KEY_ALGORITHM: _Reasons + UNSUPPORTED_ELLIPTIC_CURVE: _Reasons + UNSUPPORTED_SERIALIZATION: _Reasons + UNSUPPORTED_X509: _Reasons + UNSUPPORTED_EXCHANGE_ALGORITHM: _Reasons + UNSUPPORTED_DIFFIE_HELLMAN: _Reasons + UNSUPPORTED_MAC: _Reasons diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi new file mode 100644 index 00000000..103e96c1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi @@ -0,0 +1,117 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import datetime +from collections.abc import Iterator + +from cryptography import x509 +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes +from cryptography.x509 import ocsp + +class OCSPRequest: + @property + def issuer_key_hash(self) -> bytes: ... + @property + def issuer_name_hash(self) -> bytes: ... + @property + def hash_algorithm(self) -> hashes.HashAlgorithm: ... + @property + def serial_number(self) -> int: ... + def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... + @property + def extensions(self) -> x509.Extensions: ... + +class OCSPResponse: + @property + def responses(self) -> Iterator[OCSPSingleResponse]: ... + @property + def response_status(self) -> ocsp.OCSPResponseStatus: ... + @property + def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ... + @property + def signature_hash_algorithm( + self, + ) -> hashes.HashAlgorithm | None: ... + @property + def signature(self) -> bytes: ... + @property + def tbs_response_bytes(self) -> bytes: ... + @property + def certificates(self) -> list[x509.Certificate]: ... + @property + def responder_key_hash(self) -> bytes | None: ... + @property + def responder_name(self) -> x509.Name | None: ... + @property + def produced_at(self) -> datetime.datetime: ... + @property + def produced_at_utc(self) -> datetime.datetime: ... + @property + def certificate_status(self) -> ocsp.OCSPCertStatus: ... + @property + def revocation_time(self) -> datetime.datetime | None: ... + @property + def revocation_time_utc(self) -> datetime.datetime | None: ... + @property + def revocation_reason(self) -> x509.ReasonFlags | None: ... + @property + def this_update(self) -> datetime.datetime: ... + @property + def this_update_utc(self) -> datetime.datetime: ... 
+ @property + def next_update(self) -> datetime.datetime | None: ... + @property + def next_update_utc(self) -> datetime.datetime | None: ... + @property + def issuer_key_hash(self) -> bytes: ... + @property + def issuer_name_hash(self) -> bytes: ... + @property + def hash_algorithm(self) -> hashes.HashAlgorithm: ... + @property + def serial_number(self) -> int: ... + @property + def extensions(self) -> x509.Extensions: ... + @property + def single_extensions(self) -> x509.Extensions: ... + def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... + +class OCSPSingleResponse: + @property + def certificate_status(self) -> ocsp.OCSPCertStatus: ... + @property + def revocation_time(self) -> datetime.datetime | None: ... + @property + def revocation_time_utc(self) -> datetime.datetime | None: ... + @property + def revocation_reason(self) -> x509.ReasonFlags | None: ... + @property + def this_update(self) -> datetime.datetime: ... + @property + def this_update_utc(self) -> datetime.datetime: ... + @property + def next_update(self) -> datetime.datetime | None: ... + @property + def next_update_utc(self) -> datetime.datetime | None: ... + @property + def issuer_key_hash(self) -> bytes: ... + @property + def issuer_name_hash(self) -> bytes: ... + @property + def hash_algorithm(self) -> hashes.HashAlgorithm: ... + @property + def serial_number(self) -> int: ... + +def load_der_ocsp_request(data: bytes) -> ocsp.OCSPRequest: ... +def load_der_ocsp_response(data: bytes) -> ocsp.OCSPResponse: ... +def create_ocsp_request( + builder: ocsp.OCSPRequestBuilder, +) -> ocsp.OCSPRequest: ... +def create_ocsp_response( + status: ocsp.OCSPResponseStatus, + builder: ocsp.OCSPResponseBuilder | None, + private_key: PrivateKeyTypes | None, + hash_algorithm: hashes.HashAlgorithm | None, +) -> ocsp.OCSPResponse: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi new file mode 100644 index 00000000..5fb3cb24 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi @@ -0,0 +1,75 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.bindings._rust.openssl import ( + aead, + ciphers, + cmac, + dh, + dsa, + ec, + ed448, + ed25519, + hashes, + hmac, + kdf, + keys, + poly1305, + rsa, + x448, + x25519, +) + +__all__ = [ + "aead", + "ciphers", + "cmac", + "dh", + "dsa", + "ec", + "ed448", + "ed25519", + "hashes", + "hmac", + "kdf", + "keys", + "openssl_version", + "openssl_version_text", + "poly1305", + "raise_openssl_error", + "rsa", + "x448", + "x25519", +] + +CRYPTOGRAPHY_IS_LIBRESSL: bool +CRYPTOGRAPHY_IS_BORINGSSL: bool +CRYPTOGRAPHY_IS_AWSLC: bool +CRYPTOGRAPHY_OPENSSL_300_OR_GREATER: bool +CRYPTOGRAPHY_OPENSSL_309_OR_GREATER: bool +CRYPTOGRAPHY_OPENSSL_320_OR_GREATER: bool +CRYPTOGRAPHY_OPENSSL_330_OR_GREATER: bool +CRYPTOGRAPHY_OPENSSL_350_OR_GREATER: bool + +class Providers: ... + +_legacy_provider_loaded: bool +_providers: Providers + +def openssl_version() -> int: ... +def openssl_version_text() -> str: ... +def raise_openssl_error() -> typing.NoReturn: ... +def capture_error_stack() -> list[OpenSSLError]: ... +def is_fips_enabled() -> bool: ... +def enable_fips(providers: Providers) -> None: ... 
+ +class OpenSSLError: + @property + def lib(self) -> int: ... + @property + def reason(self) -> int: ... + @property + def reason_text(self) -> bytes: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/aead.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/aead.pyi new file mode 100644 index 00000000..831fcd14 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/aead.pyi @@ -0,0 +1,107 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from collections.abc import Sequence + +from cryptography.utils import Buffer + +class AESGCM: + def __init__(self, key: Buffer) -> None: ... + @staticmethod + def generate_key(bit_length: int) -> bytes: ... + def encrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + def decrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + +class ChaCha20Poly1305: + def __init__(self, key: Buffer) -> None: ... + @staticmethod + def generate_key() -> bytes: ... + def encrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + def decrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + +class AESCCM: + def __init__(self, key: Buffer, tag_length: int = 16) -> None: ... + @staticmethod + def generate_key(bit_length: int) -> bytes: ... + def encrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + def decrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + +class AESSIV: + def __init__(self, key: Buffer) -> None: ... + @staticmethod + def generate_key(bit_length: int) -> bytes: ... + def encrypt( + self, + data: Buffer, + associated_data: Sequence[Buffer] | None, + ) -> bytes: ... + def decrypt( + self, + data: Buffer, + associated_data: Sequence[Buffer] | None, + ) -> bytes: ... + +class AESOCB3: + def __init__(self, key: Buffer) -> None: ... + @staticmethod + def generate_key(bit_length: int) -> bytes: ... + def encrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + def decrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + +class AESGCMSIV: + def __init__(self, key: Buffer) -> None: ... + @staticmethod + def generate_key(bit_length: int) -> bytes: ... + def encrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + def decrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi new file mode 100644 index 00000000..a48fb017 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi @@ -0,0 +1,38 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +import typing + +from cryptography.hazmat.primitives import ciphers +from cryptography.hazmat.primitives.ciphers import modes + +@typing.overload +def create_encryption_ctx( + algorithm: ciphers.CipherAlgorithm, mode: modes.ModeWithAuthenticationTag +) -> ciphers.AEADEncryptionContext: ... +@typing.overload +def create_encryption_ctx( + algorithm: ciphers.CipherAlgorithm, mode: modes.Mode | None +) -> ciphers.CipherContext: ... +@typing.overload +def create_decryption_ctx( + algorithm: ciphers.CipherAlgorithm, mode: modes.ModeWithAuthenticationTag +) -> ciphers.AEADDecryptionContext: ... +@typing.overload +def create_decryption_ctx( + algorithm: ciphers.CipherAlgorithm, mode: modes.Mode | None +) -> ciphers.CipherContext: ... +def cipher_supported( + algorithm: ciphers.CipherAlgorithm, mode: modes.Mode +) -> bool: ... +def _advance( + ctx: ciphers.AEADEncryptionContext | ciphers.AEADDecryptionContext, n: int +) -> None: ... +def _advance_aad( + ctx: ciphers.AEADEncryptionContext | ciphers.AEADDecryptionContext, n: int +) -> None: ... + +class CipherContext: ... +class AEADEncryptionContext: ... +class AEADDecryptionContext: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/cmac.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/cmac.pyi new file mode 100644 index 00000000..9c03508b --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/cmac.pyi @@ -0,0 +1,18 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives import ciphers + +class CMAC: + def __init__( + self, + algorithm: ciphers.BlockCipherAlgorithm, + backend: typing.Any = None, + ) -> None: ... + def update(self, data: bytes) -> None: ... + def finalize(self) -> bytes: ... + def verify(self, signature: bytes) -> None: ... + def copy(self) -> CMAC: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi new file mode 100644 index 00000000..08733d74 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi @@ -0,0 +1,51 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.asymmetric import dh + +MIN_MODULUS_SIZE: int + +class DHPrivateKey: ... +class DHPublicKey: ... +class DHParameters: ... + +class DHPrivateNumbers: + def __init__(self, x: int, public_numbers: DHPublicNumbers) -> None: ... + def private_key(self, backend: typing.Any = None) -> dh.DHPrivateKey: ... + @property + def x(self) -> int: ... + @property + def public_numbers(self) -> DHPublicNumbers: ... + +class DHPublicNumbers: + def __init__( + self, y: int, parameter_numbers: DHParameterNumbers + ) -> None: ... + def public_key(self, backend: typing.Any = None) -> dh.DHPublicKey: ... + @property + def y(self) -> int: ... + @property + def parameter_numbers(self) -> DHParameterNumbers: ... + +class DHParameterNumbers: + def __init__(self, p: int, g: int, q: int | None = None) -> None: ... + def parameters(self, backend: typing.Any = None) -> dh.DHParameters: ... + @property + def p(self) -> int: ... 
+ @property + def g(self) -> int: ... + @property + def q(self) -> int | None: ... + +def generate_parameters( + generator: int, key_size: int, backend: typing.Any = None +) -> dh.DHParameters: ... +def from_pem_parameters( + data: bytes, backend: typing.Any = None +) -> dh.DHParameters: ... +def from_der_parameters( + data: bytes, backend: typing.Any = None +) -> dh.DHParameters: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi new file mode 100644 index 00000000..0922a4c4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi @@ -0,0 +1,41 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.asymmetric import dsa + +class DSAPrivateKey: ... +class DSAPublicKey: ... +class DSAParameters: ... + +class DSAPrivateNumbers: + def __init__(self, x: int, public_numbers: DSAPublicNumbers) -> None: ... + @property + def x(self) -> int: ... + @property + def public_numbers(self) -> DSAPublicNumbers: ... + def private_key(self, backend: typing.Any = None) -> dsa.DSAPrivateKey: ... + +class DSAPublicNumbers: + def __init__( + self, y: int, parameter_numbers: DSAParameterNumbers + ) -> None: ... + @property + def y(self) -> int: ... + @property + def parameter_numbers(self) -> DSAParameterNumbers: ... + def public_key(self, backend: typing.Any = None) -> dsa.DSAPublicKey: ... + +class DSAParameterNumbers: + def __init__(self, p: int, q: int, g: int) -> None: ... + @property + def p(self) -> int: ... + @property + def q(self) -> int: ... + @property + def g(self) -> int: ... + def parameters(self, backend: typing.Any = None) -> dsa.DSAParameters: ... + +def generate_parameters(key_size: int) -> dsa.DSAParameters: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ec.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ec.pyi new file mode 100644 index 00000000..5c3b7bf6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ec.pyi @@ -0,0 +1,52 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.asymmetric import ec + +class ECPrivateKey: ... +class ECPublicKey: ... + +class EllipticCurvePrivateNumbers: + def __init__( + self, private_value: int, public_numbers: EllipticCurvePublicNumbers + ) -> None: ... + def private_key( + self, backend: typing.Any = None + ) -> ec.EllipticCurvePrivateKey: ... + @property + def private_value(self) -> int: ... + @property + def public_numbers(self) -> EllipticCurvePublicNumbers: ... + +class EllipticCurvePublicNumbers: + def __init__(self, x: int, y: int, curve: ec.EllipticCurve) -> None: ... + def public_key( + self, backend: typing.Any = None + ) -> ec.EllipticCurvePublicKey: ... + @property + def x(self) -> int: ... + @property + def y(self) -> int: ... + @property + def curve(self) -> ec.EllipticCurve: ... + def __eq__(self, other: object) -> bool: ... + +def curve_supported(curve: ec.EllipticCurve) -> bool: ... 
+def generate_private_key( + curve: ec.EllipticCurve, backend: typing.Any = None +) -> ec.EllipticCurvePrivateKey: ... +def from_private_numbers( + numbers: ec.EllipticCurvePrivateNumbers, +) -> ec.EllipticCurvePrivateKey: ... +def from_public_numbers( + numbers: ec.EllipticCurvePublicNumbers, +) -> ec.EllipticCurvePublicKey: ... +def from_public_bytes( + curve: ec.EllipticCurve, data: bytes +) -> ec.EllipticCurvePublicKey: ... +def derive_private_key( + private_value: int, curve: ec.EllipticCurve +) -> ec.EllipticCurvePrivateKey: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi new file mode 100644 index 00000000..f85b3d1b --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.primitives.asymmetric import ed25519 +from cryptography.utils import Buffer + +class Ed25519PrivateKey: ... +class Ed25519PublicKey: ... + +def generate_key() -> ed25519.Ed25519PrivateKey: ... +def from_private_bytes(data: Buffer) -> ed25519.Ed25519PrivateKey: ... +def from_public_bytes(data: bytes) -> ed25519.Ed25519PublicKey: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi new file mode 100644 index 00000000..c8ca0ecb --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.primitives.asymmetric import ed448 +from cryptography.utils import Buffer + +class Ed448PrivateKey: ... +class Ed448PublicKey: ... + +def generate_key() -> ed448.Ed448PrivateKey: ... +def from_private_bytes(data: Buffer) -> ed448.Ed448PrivateKey: ... +def from_public_bytes(data: bytes) -> ed448.Ed448PublicKey: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi new file mode 100644 index 00000000..6bfd295a --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi @@ -0,0 +1,28 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives import hashes +from cryptography.utils import Buffer + +class Hash(hashes.HashContext): + def __init__( + self, algorithm: hashes.HashAlgorithm, backend: typing.Any = None + ) -> None: ... + @property + def algorithm(self) -> hashes.HashAlgorithm: ... + def update(self, data: Buffer) -> None: ... + def finalize(self) -> bytes: ... + def copy(self) -> Hash: ... + +def hash_supported(algorithm: hashes.HashAlgorithm) -> bool: ... + +class XOFHash: + def __init__(self, algorithm: hashes.ExtendableOutputFunction) -> None: ... + @property + def algorithm(self) -> hashes.ExtendableOutputFunction: ... 
+ def update(self, data: Buffer) -> None: ... + def squeeze(self, length: int) -> bytes: ... + def copy(self) -> XOFHash: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi new file mode 100644 index 00000000..3883d1b1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi @@ -0,0 +1,22 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives import hashes +from cryptography.utils import Buffer + +class HMAC(hashes.HashContext): + def __init__( + self, + key: Buffer, + algorithm: hashes.HashAlgorithm, + backend: typing.Any = None, + ) -> None: ... + @property + def algorithm(self) -> hashes.HashAlgorithm: ... + def update(self, data: Buffer) -> None: ... + def finalize(self) -> bytes: ... + def verify(self, signature: bytes) -> None: ... + def copy(self) -> HMAC: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi new file mode 100644 index 00000000..9e2d8d99 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi @@ -0,0 +1,72 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.hashes import HashAlgorithm +from cryptography.utils import Buffer + +def derive_pbkdf2_hmac( + key_material: Buffer, + algorithm: HashAlgorithm, + salt: bytes, + iterations: int, + length: int, +) -> bytes: ... + +class Scrypt: + def __init__( + self, + salt: bytes, + length: int, + n: int, + r: int, + p: int, + backend: typing.Any = None, + ) -> None: ... + def derive(self, key_material: Buffer) -> bytes: ... + def verify(self, key_material: bytes, expected_key: bytes) -> None: ... + +class Argon2id: + def __init__( + self, + *, + salt: bytes, + length: int, + iterations: int, + lanes: int, + memory_cost: int, + ad: bytes | None = None, + secret: bytes | None = None, + ) -> None: ... + def derive(self, key_material: bytes) -> bytes: ... + def verify(self, key_material: bytes, expected_key: bytes) -> None: ... + def derive_phc_encoded(self, key_material: bytes) -> str: ... + @classmethod + def verify_phc_encoded( + cls, key_material: bytes, phc_encoded: str, secret: bytes | None = None + ) -> None: ... + +class HKDF: + def __init__( + self, + algorithm: HashAlgorithm, + length: int, + salt: bytes | None, + info: bytes | None, + backend: typing.Any = None, + ): ... + def derive(self, key_material: Buffer) -> bytes: ... + def verify(self, key_material: bytes, expected_key: bytes) -> None: ... + +class HKDFExpand: + def __init__( + self, + algorithm: HashAlgorithm, + length: int, + info: bytes | None, + backend: typing.Any = None, + ): ... + def derive(self, key_material: Buffer) -> bytes: ... + def verify(self, key_material: bytes, expected_key: bytes) -> None: ... 
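The kdf.pyi stub above types the Rust-backed key-derivation classes; applications are expected to reach them through the public wrappers in cryptography.hazmat.primitives.kdf rather than through the binding module. A minimal sketch of that public path (the salt, info, and input key material below are illustrative placeholders, not values from this diff):

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.hkdf import HKDF

# Derive a 32-byte key. The public HKDF class is ultimately backed by the
# rust_openssl.kdf.HKDF binding whose stub appears above.
hkdf = HKDF(
    algorithm=hashes.SHA256(),
    length=32,
    salt=None,             # placeholder; a random salt is preferable in practice
    info=b"example-info",  # placeholder context value
)
key = hkdf.derive(b"input key material")  # placeholder key material

Each instance is single-use: a second derive() or verify() call raises AlreadyFinalized, so a fresh object is constructed per derivation.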
diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/keys.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/keys.pyi new file mode 100644 index 00000000..404057e0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/keys.pyi @@ -0,0 +1,34 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.asymmetric.types import ( + PrivateKeyTypes, + PublicKeyTypes, +) +from cryptography.utils import Buffer + +def load_der_private_key( + data: Buffer, + password: bytes | None, + backend: typing.Any = None, + *, + unsafe_skip_rsa_key_validation: bool = False, +) -> PrivateKeyTypes: ... +def load_pem_private_key( + data: Buffer, + password: bytes | None, + backend: typing.Any = None, + *, + unsafe_skip_rsa_key_validation: bool = False, +) -> PrivateKeyTypes: ... +def load_der_public_key( + data: bytes, + backend: typing.Any = None, +) -> PublicKeyTypes: ... +def load_pem_public_key( + data: bytes, + backend: typing.Any = None, +) -> PublicKeyTypes: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi new file mode 100644 index 00000000..45a2a39f --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi @@ -0,0 +1,15 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.utils import Buffer + +class Poly1305: + def __init__(self, key: Buffer) -> None: ... + @staticmethod + def generate_tag(key: Buffer, data: Buffer) -> bytes: ... + @staticmethod + def verify_tag(key: Buffer, data: Buffer, tag: bytes) -> None: ... + def update(self, data: Buffer) -> None: ... + def finalize(self) -> bytes: ... + def verify(self, tag: bytes) -> None: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/rsa.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/rsa.pyi new file mode 100644 index 00000000..ef7752dd --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/rsa.pyi @@ -0,0 +1,55 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.asymmetric import rsa + +class RSAPrivateKey: ... +class RSAPublicKey: ... + +class RSAPrivateNumbers: + def __init__( + self, + p: int, + q: int, + d: int, + dmp1: int, + dmq1: int, + iqmp: int, + public_numbers: RSAPublicNumbers, + ) -> None: ... + @property + def p(self) -> int: ... + @property + def q(self) -> int: ... + @property + def d(self) -> int: ... + @property + def dmp1(self) -> int: ... + @property + def dmq1(self) -> int: ... + @property + def iqmp(self) -> int: ... + @property + def public_numbers(self) -> RSAPublicNumbers: ... + def private_key( + self, + backend: typing.Any = None, + *, + unsafe_skip_rsa_key_validation: bool = False, + ) -> rsa.RSAPrivateKey: ... 
+ +class RSAPublicNumbers: + def __init__(self, e: int, n: int) -> None: ... + @property + def n(self) -> int: ... + @property + def e(self) -> int: ... + def public_key(self, backend: typing.Any = None) -> rsa.RSAPublicKey: ... + +def generate_private_key( + public_exponent: int, + key_size: int, +) -> rsa.RSAPrivateKey: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi new file mode 100644 index 00000000..38d2addd --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.primitives.asymmetric import x25519 +from cryptography.utils import Buffer + +class X25519PrivateKey: ... +class X25519PublicKey: ... + +def generate_key() -> x25519.X25519PrivateKey: ... +def from_private_bytes(data: Buffer) -> x25519.X25519PrivateKey: ... +def from_public_bytes(data: bytes) -> x25519.X25519PublicKey: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi new file mode 100644 index 00000000..3ac09809 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.primitives.asymmetric import x448 +from cryptography.utils import Buffer + +class X448PrivateKey: ... +class X448PublicKey: ... + +def generate_key() -> x448.X448PrivateKey: ... +def from_private_bytes(data: Buffer) -> x448.X448PrivateKey: ... +def from_public_bytes(data: bytes) -> x448.X448PublicKey: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/pkcs12.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/pkcs12.pyi new file mode 100644 index 00000000..b25becb6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/pkcs12.pyi @@ -0,0 +1,52 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing +from collections.abc import Iterable + +from cryptography import x509 +from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes +from cryptography.hazmat.primitives.serialization import ( + KeySerializationEncryption, +) +from cryptography.hazmat.primitives.serialization.pkcs12 import ( + PKCS12KeyAndCertificates, + PKCS12PrivateKeyTypes, +) +from cryptography.utils import Buffer + +class PKCS12Certificate: + def __init__( + self, cert: x509.Certificate, friendly_name: bytes | None + ) -> None: ... + @property + def friendly_name(self) -> bytes | None: ... + @property + def certificate(self) -> x509.Certificate: ... + +def load_key_and_certificates( + data: Buffer, + password: Buffer | None, + backend: typing.Any = None, +) -> tuple[ + PrivateKeyTypes | None, + x509.Certificate | None, + list[x509.Certificate], +]: ... 
+def load_pkcs12( + data: bytes, + password: bytes | None, + backend: typing.Any = None, +) -> PKCS12KeyAndCertificates: ... +def serialize_java_truststore( + certs: Iterable[PKCS12Certificate], + encryption_algorithm: KeySerializationEncryption, +) -> bytes: ... +def serialize_key_and_certificates( + name: bytes | None, + key: PKCS12PrivateKeyTypes | None, + cert: x509.Certificate | None, + cas: Iterable[x509.Certificate | PKCS12Certificate] | None, + encryption_algorithm: KeySerializationEncryption, +) -> bytes: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi new file mode 100644 index 00000000..358b1358 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi @@ -0,0 +1,50 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from collections.abc import Iterable + +from cryptography import x509 +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import rsa +from cryptography.hazmat.primitives.serialization import pkcs7 + +def serialize_certificates( + certs: list[x509.Certificate], + encoding: serialization.Encoding, +) -> bytes: ... +def encrypt_and_serialize( + builder: pkcs7.PKCS7EnvelopeBuilder, + content_encryption_algorithm: pkcs7.ContentEncryptionAlgorithm, + encoding: serialization.Encoding, + options: Iterable[pkcs7.PKCS7Options], +) -> bytes: ... +def sign_and_serialize( + builder: pkcs7.PKCS7SignatureBuilder, + encoding: serialization.Encoding, + options: Iterable[pkcs7.PKCS7Options], +) -> bytes: ... +def decrypt_der( + data: bytes, + certificate: x509.Certificate, + private_key: rsa.RSAPrivateKey, + options: Iterable[pkcs7.PKCS7Options], +) -> bytes: ... +def decrypt_pem( + data: bytes, + certificate: x509.Certificate, + private_key: rsa.RSAPrivateKey, + options: Iterable[pkcs7.PKCS7Options], +) -> bytes: ... +def decrypt_smime( + data: bytes, + certificate: x509.Certificate, + private_key: rsa.RSAPrivateKey, + options: Iterable[pkcs7.PKCS7Options], +) -> bytes: ... +def load_pem_pkcs7_certificates( + data: bytes, +) -> list[x509.Certificate]: ... +def load_der_pkcs7_certificates( + data: bytes, +) -> list[x509.Certificate]: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/test_support.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/test_support.pyi new file mode 100644 index 00000000..c6c6d0bb --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/test_support.pyi @@ -0,0 +1,23 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography import x509 +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.serialization import pkcs7 +from cryptography.utils import Buffer + +class TestCertificate: + not_after_tag: int + not_before_tag: int + issuer_value_tags: list[int] + subject_value_tags: list[int] + +def test_parse_certificate(data: bytes) -> TestCertificate: ... +def pkcs7_verify( + encoding: serialization.Encoding, + sig: bytes, + msg: Buffer | None, + certs: list[x509.Certificate], + options: list[pkcs7.PKCS7Options], +) -> None: ... 
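The hunks above add only the `.pyi` type stubs for the Rust-backed `_rust.pkcs12` and `_rust.pkcs7` modules; the supported entry points are the wrappers re-exported from `cryptography.hazmat.primitives.serialization.pkcs12`. A minimal sketch of round-tripping a PKCS#12 bundle through that public API follows; the file name `bundle.p12` and both passwords are hypothetical:

from cryptography.hazmat.primitives.serialization import BestAvailableEncryption
from cryptography.hazmat.primitives.serialization.pkcs12 import (
    load_key_and_certificates,
    serialize_key_and_certificates,
)

# Parse an existing bundle: returns (private_key, certificate, extra_certs).
with open("bundle.p12", "rb") as f:
    key, cert, extra_certs = load_key_and_certificates(f.read(), b"old-password")

# Re-serialize the same material under a new password.
blob = serialize_key_and_certificates(
    name=b"friendly-name",
    key=key,
    cert=cert,
    cas=extra_certs,
    encryption_algorithm=BestAvailableEncryption(b"new-password"),
)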
diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi new file mode 100644 index 00000000..83c3441b --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi @@ -0,0 +1,301 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import datetime +import typing +from collections.abc import Iterator + +from cryptography import x509 +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric.ec import ECDSA +from cryptography.hazmat.primitives.asymmetric.padding import PSS, PKCS1v15 +from cryptography.hazmat.primitives.asymmetric.types import ( + CertificateIssuerPublicKeyTypes, + CertificatePublicKeyTypes, + PrivateKeyTypes, +) +from cryptography.x509 import certificate_transparency + +def load_pem_x509_certificate( + data: bytes, backend: typing.Any = None +) -> x509.Certificate: ... +def load_der_x509_certificate( + data: bytes, backend: typing.Any = None +) -> x509.Certificate: ... +def load_pem_x509_certificates( + data: bytes, +) -> list[x509.Certificate]: ... +def load_pem_x509_crl( + data: bytes, backend: typing.Any = None +) -> x509.CertificateRevocationList: ... +def load_der_x509_crl( + data: bytes, backend: typing.Any = None +) -> x509.CertificateRevocationList: ... +def load_pem_x509_csr( + data: bytes, backend: typing.Any = None +) -> x509.CertificateSigningRequest: ... +def load_der_x509_csr( + data: bytes, backend: typing.Any = None +) -> x509.CertificateSigningRequest: ... +def encode_name_bytes(name: x509.Name) -> bytes: ... +def encode_extension_value(extension: x509.ExtensionType) -> bytes: ... +def create_x509_certificate( + builder: x509.CertificateBuilder, + private_key: PrivateKeyTypes, + hash_algorithm: hashes.HashAlgorithm | None, + rsa_padding: PKCS1v15 | PSS | None, + ecdsa_deterministic: bool | None, +) -> x509.Certificate: ... +def create_x509_csr( + builder: x509.CertificateSigningRequestBuilder, + private_key: PrivateKeyTypes, + hash_algorithm: hashes.HashAlgorithm | None, + rsa_padding: PKCS1v15 | PSS | None, + ecdsa_deterministic: bool | None, +) -> x509.CertificateSigningRequest: ... +def create_x509_crl( + builder: x509.CertificateRevocationListBuilder, + private_key: PrivateKeyTypes, + hash_algorithm: hashes.HashAlgorithm | None, + rsa_padding: PKCS1v15 | PSS | None, + ecdsa_deterministic: bool | None, +) -> x509.CertificateRevocationList: ... + +class Sct: + @property + def version(self) -> certificate_transparency.Version: ... + @property + def log_id(self) -> bytes: ... + @property + def timestamp(self) -> datetime.datetime: ... + @property + def entry_type(self) -> certificate_transparency.LogEntryType: ... + @property + def signature_hash_algorithm(self) -> hashes.HashAlgorithm: ... + @property + def signature_algorithm( + self, + ) -> certificate_transparency.SignatureAlgorithm: ... + @property + def signature(self) -> bytes: ... + @property + def extension_bytes(self) -> bytes: ... + +class Certificate: + def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes: ... + @property + def serial_number(self) -> int: ... + @property + def version(self) -> x509.Version: ... + def public_key(self) -> CertificatePublicKeyTypes: ... + @property + def public_key_algorithm_oid(self) -> x509.ObjectIdentifier: ... 
+ @property + def not_valid_before(self) -> datetime.datetime: ... + @property + def not_valid_before_utc(self) -> datetime.datetime: ... + @property + def not_valid_after(self) -> datetime.datetime: ... + @property + def not_valid_after_utc(self) -> datetime.datetime: ... + @property + def issuer(self) -> x509.Name: ... + @property + def subject(self) -> x509.Name: ... + @property + def signature_hash_algorithm( + self, + ) -> hashes.HashAlgorithm | None: ... + @property + def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ... + @property + def signature_algorithm_parameters( + self, + ) -> PSS | PKCS1v15 | ECDSA | None: ... + @property + def extensions(self) -> x509.Extensions: ... + @property + def signature(self) -> bytes: ... + @property + def tbs_certificate_bytes(self) -> bytes: ... + @property + def tbs_precertificate_bytes(self) -> bytes: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... + def verify_directly_issued_by(self, issuer: Certificate) -> None: ... + +class RevokedCertificate: ... + +class CertificateRevocationList: + def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... + def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes: ... + def get_revoked_certificate_by_serial_number( + self, serial_number: int + ) -> x509.RevokedCertificate | None: ... + @property + def signature_hash_algorithm( + self, + ) -> hashes.HashAlgorithm | None: ... + @property + def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ... + @property + def signature_algorithm_parameters( + self, + ) -> PSS | PKCS1v15 | ECDSA | None: ... + @property + def issuer(self) -> x509.Name: ... + @property + def next_update(self) -> datetime.datetime | None: ... + @property + def next_update_utc(self) -> datetime.datetime | None: ... + @property + def last_update(self) -> datetime.datetime: ... + @property + def last_update_utc(self) -> datetime.datetime: ... + @property + def extensions(self) -> x509.Extensions: ... + @property + def signature(self) -> bytes: ... + @property + def tbs_certlist_bytes(self) -> bytes: ... + def __eq__(self, other: object) -> bool: ... + def __len__(self) -> int: ... + @typing.overload + def __getitem__(self, idx: int) -> x509.RevokedCertificate: ... + @typing.overload + def __getitem__(self, idx: slice) -> list[x509.RevokedCertificate]: ... + def __iter__(self) -> Iterator[x509.RevokedCertificate]: ... + def is_signature_valid( + self, public_key: CertificateIssuerPublicKeyTypes + ) -> bool: ... + +class CertificateSigningRequest: + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + def public_key(self) -> CertificatePublicKeyTypes: ... + @property + def subject(self) -> x509.Name: ... + @property + def signature_hash_algorithm( + self, + ) -> hashes.HashAlgorithm | None: ... + @property + def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ... + @property + def signature_algorithm_parameters( + self, + ) -> PSS | PKCS1v15 | ECDSA | None: ... + @property + def extensions(self) -> x509.Extensions: ... + @property + def attributes(self) -> x509.Attributes: ... + def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... + @property + def signature(self) -> bytes: ... + @property + def tbs_certrequest_bytes(self) -> bytes: ... + @property + def is_signature_valid(self) -> bool: ... + +class PolicyBuilder: + def time(self, time: datetime.datetime) -> PolicyBuilder: ... 
+ def store(self, store: Store) -> PolicyBuilder: ... + def max_chain_depth(self, max_chain_depth: int) -> PolicyBuilder: ... + def extension_policies( + self, *, ca_policy: ExtensionPolicy, ee_policy: ExtensionPolicy + ) -> PolicyBuilder: ... + def build_client_verifier(self) -> ClientVerifier: ... + def build_server_verifier( + self, subject: x509.verification.Subject + ) -> ServerVerifier: ... + +class Policy: + @property + def max_chain_depth(self) -> int: ... + @property + def subject(self) -> x509.verification.Subject | None: ... + @property + def validation_time(self) -> datetime.datetime: ... + @property + def extended_key_usage(self) -> x509.ObjectIdentifier: ... + @property + def minimum_rsa_modulus(self) -> int: ... + +class Criticality: + CRITICAL: Criticality + AGNOSTIC: Criticality + NON_CRITICAL: Criticality + +T = typing.TypeVar("T", contravariant=True, bound=x509.ExtensionType) + +MaybeExtensionValidatorCallback = typing.Callable[ + [ + Policy, + x509.Certificate, + T | None, + ], + None, +] + +PresentExtensionValidatorCallback = typing.Callable[ + [Policy, x509.Certificate, T], + None, +] + +class ExtensionPolicy: + @staticmethod + def permit_all() -> ExtensionPolicy: ... + @staticmethod + def webpki_defaults_ca() -> ExtensionPolicy: ... + @staticmethod + def webpki_defaults_ee() -> ExtensionPolicy: ... + def require_not_present( + self, extension_type: type[x509.ExtensionType] + ) -> ExtensionPolicy: ... + def may_be_present( + self, + extension_type: type[T], + criticality: Criticality, + validator: MaybeExtensionValidatorCallback[T] | None, + ) -> ExtensionPolicy: ... + def require_present( + self, + extension_type: type[T], + criticality: Criticality, + validator: PresentExtensionValidatorCallback[T] | None, + ) -> ExtensionPolicy: ... + +class VerifiedClient: + @property + def subjects(self) -> list[x509.GeneralName] | None: ... + @property + def chain(self) -> list[x509.Certificate]: ... + +class ClientVerifier: + @property + def policy(self) -> Policy: ... + @property + def store(self) -> Store: ... + def verify( + self, + leaf: x509.Certificate, + intermediates: list[x509.Certificate], + ) -> VerifiedClient: ... + +class ServerVerifier: + @property + def policy(self) -> Policy: ... + @property + def store(self) -> Store: ... + def verify( + self, + leaf: x509.Certificate, + intermediates: list[x509.Certificate], + ) -> list[x509.Certificate]: ... + +class Store: + def __init__(self, certs: list[x509.Certificate]) -> None: ... + +class VerificationError(Exception): ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__init__.py new file mode 100644 index 00000000..b5093362 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__init__.py @@ -0,0 +1,3 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
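The `x509.pyi` stub above types the newer chain-verification API (`PolicyBuilder`, `Store`, and the client/server verifiers). A minimal sketch of server-certificate verification through the public `cryptography.x509.verification` module, assuming `ca.pem` and `leaf.pem` are hypothetical PEM files and the leaf chains directly to a root in the store:

from cryptography import x509
from cryptography.x509.verification import PolicyBuilder, Store

with open("ca.pem", "rb") as f:
    store = Store(x509.load_pem_x509_certificates(f.read()))
with open("leaf.pem", "rb") as f:
    leaf = x509.load_pem_x509_certificate(f.read())

# Validation time defaults to "now"; .time(...) can pin it for reproducible runs.
verifier = PolicyBuilder().store(store).build_server_verifier(
    x509.DNSName("example.com")
)
# Returns the validated chain; raises x509.verification.VerificationError on failure.
chain = verifier.verify(leaf, [])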
diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..554aac7d Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc new file mode 100644 index 00000000..6c7b80e2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc new file mode 100644 index 00000000..8ded7074 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py new file mode 100644 index 00000000..063bcf5b --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py @@ -0,0 +1,207 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + + +def cryptography_has_set_cert_cb() -> list[str]: + return [ + "SSL_CTX_set_cert_cb", + "SSL_set_cert_cb", + ] + + +def cryptography_has_ssl_st() -> list[str]: + return [ + "SSL_ST_BEFORE", + "SSL_ST_OK", + "SSL_ST_INIT", + "SSL_ST_RENEGOTIATE", + ] + + +def cryptography_has_tls_st() -> list[str]: + return [ + "TLS_ST_BEFORE", + "TLS_ST_OK", + ] + + +def cryptography_has_ssl_sigalgs() -> list[str]: + return [ + "SSL_CTX_set1_sigalgs_list", + ] + + +def cryptography_has_psk() -> list[str]: + return [ + "SSL_CTX_use_psk_identity_hint", + "SSL_CTX_set_psk_server_callback", + "SSL_CTX_set_psk_client_callback", + ] + + +def cryptography_has_psk_tlsv13() -> list[str]: + return [ + "SSL_CTX_set_psk_find_session_callback", + "SSL_CTX_set_psk_use_session_callback", + "Cryptography_SSL_SESSION_new", + "SSL_CIPHER_find", + "SSL_SESSION_set1_master_key", + "SSL_SESSION_set_cipher", + "SSL_SESSION_set_protocol_version", + ] + + +def cryptography_has_custom_ext() -> list[str]: + return [ + "SSL_CTX_add_client_custom_ext", + "SSL_CTX_add_server_custom_ext", + "SSL_extension_supported", + ] + + +def cryptography_has_tlsv13_functions() -> list[str]: + return [ + "SSL_CTX_set_ciphersuites", + ] + + +def cryptography_has_tlsv13_hs_functions() -> list[str]: + return [ + "SSL_VERIFY_POST_HANDSHAKE", + "SSL_verify_client_post_handshake", + "SSL_CTX_set_post_handshake_auth", + "SSL_set_post_handshake_auth", + "SSL_SESSION_get_max_early_data", + "SSL_write_early_data", + "SSL_read_early_data", + "SSL_CTX_set_max_early_data", + ] + + +def cryptography_has_ssl_verify_client_post_handshake() -> list[str]: + return [ + "SSL_verify_client_post_handshake", + ] + + +def cryptography_has_engine() -> list[str]: + return [ + "ENGINE_by_id", + "ENGINE_init", + "ENGINE_finish", + "ENGINE_get_default_RAND", + "ENGINE_set_default_RAND", + "ENGINE_unregister_RAND", + "ENGINE_ctrl_cmd", + "ENGINE_free", + "ENGINE_get_name", + "ENGINE_ctrl_cmd_string", + "ENGINE_load_builtin_engines", + "ENGINE_load_private_key", + "ENGINE_load_public_key", + "SSL_CTX_set_client_cert_engine", + ] + + +def cryptography_has_verified_chain() -> list[str]: + return [ + "SSL_get0_verified_chain", + ] + + +def cryptography_has_srtp() -> list[str]: + return [ + "SSL_CTX_set_tlsext_use_srtp", + "SSL_set_tlsext_use_srtp", + "SSL_get_selected_srtp_profile", + ] + + +def cryptography_has_op_no_renegotiation() -> list[str]: + return [ + "SSL_OP_NO_RENEGOTIATION", + ] + + +def cryptography_has_dtls_get_data_mtu() -> list[str]: + return [ + "DTLS_get_data_mtu", + ] + + +def cryptography_has_ssl_cookie() -> list[str]: + return [ + "SSL_OP_COOKIE_EXCHANGE", + "DTLSv1_listen", + "SSL_CTX_set_cookie_generate_cb", + "SSL_CTX_set_cookie_verify_cb", + ] + + +def cryptography_has_prime_checks() -> list[str]: + return [ + "BN_prime_checks_for_size", + ] + + +def cryptography_has_unexpected_eof_while_reading() -> list[str]: + return ["SSL_R_UNEXPECTED_EOF_WHILE_READING"] + + +def cryptography_has_ssl_op_ignore_unexpected_eof() -> list[str]: + return [ + "SSL_OP_IGNORE_UNEXPECTED_EOF", + ] + + +def cryptography_has_get_extms_support() -> list[str]: + return ["SSL_get_extms_support"] + + +def cryptography_has_ssl_get0_group_name() -> list[str]: + return ["SSL_get0_group_name"] + + +# This is a mapping of +# {condition: function-returning-names-dependent-on-that-condition} so we can +# loop over them and delete unsupported names at runtime. It will be removed +# when cffi supports #if in cdef. 
We use functions instead of just a dict of +# lists so we can use coverage to measure which are used. +CONDITIONAL_NAMES = { + "Cryptography_HAS_SET_CERT_CB": cryptography_has_set_cert_cb, + "Cryptography_HAS_SSL_ST": cryptography_has_ssl_st, + "Cryptography_HAS_TLS_ST": cryptography_has_tls_st, + "Cryptography_HAS_SIGALGS": cryptography_has_ssl_sigalgs, + "Cryptography_HAS_PSK": cryptography_has_psk, + "Cryptography_HAS_PSK_TLSv1_3": cryptography_has_psk_tlsv13, + "Cryptography_HAS_CUSTOM_EXT": cryptography_has_custom_ext, + "Cryptography_HAS_TLSv1_3_FUNCTIONS": cryptography_has_tlsv13_functions, + "Cryptography_HAS_TLSv1_3_HS_FUNCTIONS": ( + cryptography_has_tlsv13_hs_functions + ), + "Cryptography_HAS_SSL_VERIFY_CLIENT_POST_HANDSHAKE": ( + cryptography_has_ssl_verify_client_post_handshake + ), + "Cryptography_HAS_ENGINE": cryptography_has_engine, + "Cryptography_HAS_VERIFIED_CHAIN": cryptography_has_verified_chain, + "Cryptography_HAS_SRTP": cryptography_has_srtp, + "Cryptography_HAS_OP_NO_RENEGOTIATION": ( + cryptography_has_op_no_renegotiation + ), + "Cryptography_HAS_DTLS_GET_DATA_MTU": cryptography_has_dtls_get_data_mtu, + "Cryptography_HAS_SSL_COOKIE": cryptography_has_ssl_cookie, + "Cryptography_HAS_PRIME_CHECKS": cryptography_has_prime_checks, + "Cryptography_HAS_UNEXPECTED_EOF_WHILE_READING": ( + cryptography_has_unexpected_eof_while_reading + ), + "Cryptography_HAS_SSL_OP_IGNORE_UNEXPECTED_EOF": ( + cryptography_has_ssl_op_ignore_unexpected_eof + ), + "Cryptography_HAS_GET_EXTMS_SUPPORT": cryptography_has_get_extms_support, + "Cryptography_HAS_SSL_GET0_GROUP_NAME": ( + cryptography_has_ssl_get0_group_name + ), +} diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/binding.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/binding.py new file mode 100644 index 00000000..4494c71e --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/binding.py @@ -0,0 +1,137 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import os +import sys +import threading +import types +import typing +import warnings +from collections.abc import Callable + +import cryptography +from cryptography.exceptions import InternalError +from cryptography.hazmat.bindings._rust import _openssl, openssl +from cryptography.hazmat.bindings.openssl._conditional import CONDITIONAL_NAMES +from cryptography.utils import CryptographyDeprecationWarning + + +def _openssl_assert(ok: bool) -> None: + if not ok: + errors = openssl.capture_error_stack() + + raise InternalError( + "Unknown OpenSSL error. This error is commonly encountered when " + "another library is not cleaning up the OpenSSL error stack. If " + "you are using cryptography with another library that uses " + "OpenSSL try disabling it before reporting a bug. Otherwise " + "please file an issue at https://github.com/pyca/cryptography/" + "issues with information on how to reproduce " + f"this. 
({errors!r})", + errors, + ) + + +def build_conditional_library( + lib: typing.Any, + conditional_names: dict[str, Callable[[], list[str]]], +) -> typing.Any: + conditional_lib = types.ModuleType("lib") + conditional_lib._original_lib = lib # type: ignore[attr-defined] + excluded_names = set() + for condition, names_cb in conditional_names.items(): + if not getattr(lib, condition): + excluded_names.update(names_cb()) + + for attr in dir(lib): + if attr not in excluded_names: + setattr(conditional_lib, attr, getattr(lib, attr)) + + return conditional_lib + + +class Binding: + """ + OpenSSL API wrapper. + """ + + lib: typing.ClassVar[typing.Any] = None + ffi = _openssl.ffi + _lib_loaded = False + _init_lock = threading.Lock() + + def __init__(self) -> None: + self._ensure_ffi_initialized() + + @classmethod + def _ensure_ffi_initialized(cls) -> None: + with cls._init_lock: + if not cls._lib_loaded: + cls.lib = build_conditional_library( + _openssl.lib, CONDITIONAL_NAMES + ) + cls._lib_loaded = True + + @classmethod + def init_static_locks(cls) -> None: + cls._ensure_ffi_initialized() + + +def _verify_package_version(version: str) -> None: + # Occasionally we run into situations where the version of the Python + # package does not match the version of the shared object that is loaded. + # This may occur in environments where multiple versions of cryptography + # are installed and available in the python path. To avoid errors cropping + # up later this code checks that the currently imported package and the + # shared object that were loaded have the same version and raise an + # ImportError if they do not + so_package_version = _openssl.ffi.string( + _openssl.lib.CRYPTOGRAPHY_PACKAGE_VERSION + ) + if version.encode("ascii") != so_package_version: + raise ImportError( + "The version of cryptography does not match the loaded " + "shared object. This can happen if you have multiple copies of " + "cryptography installed in your Python path. Please try creating " + "a new virtual environment to resolve this issue. " + f"Loaded python version: {version}, " + f"shared object version: {so_package_version}" + ) + + _openssl_assert( + _openssl.lib.OpenSSL_version_num() == openssl.openssl_version(), + ) + + +_verify_package_version(cryptography.__version__) + +Binding.init_static_locks() + +if ( + sys.platform == "win32" + and os.environ.get("PROCESSOR_ARCHITEW6432") is not None +): + warnings.warn( + "You are using cryptography on a 32-bit Python on a 64-bit Windows " + "Operating System. Cryptography will be significantly faster if you " + "switch to using a 64-bit Python.", + UserWarning, + stacklevel=2, + ) + +if ( + not openssl.CRYPTOGRAPHY_IS_LIBRESSL + and not openssl.CRYPTOGRAPHY_IS_BORINGSSL + and not openssl.CRYPTOGRAPHY_IS_AWSLC + and not openssl.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER +): + warnings.warn( + "You are using OpenSSL < 3.0. Support for OpenSSL < 3.0 is deprecated " + "and will be removed in the next release. Please upgrade to OpenSSL " + "3.0 or later.", + CryptographyDeprecationWarning, + stacklevel=2, + ) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/__init__.py new file mode 100644 index 00000000..41d73186 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..652201c7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__init__.py new file mode 100644 index 00000000..41d73186 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..7468aac7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-312.pyc new file mode 100644 index 00000000..2bbd8f74 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/algorithms.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/algorithms.py new file mode 100644 index 00000000..072a9914 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/algorithms.py @@ -0,0 +1,112 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +from cryptography.hazmat.primitives._cipheralgorithm import ( + BlockCipherAlgorithm, + CipherAlgorithm, + _verify_key_size, +) + + +class ARC4(CipherAlgorithm): + name = "RC4" + key_sizes = frozenset([40, 56, 64, 80, 128, 160, 192, 256]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class TripleDES(BlockCipherAlgorithm): + name = "3DES" + block_size = 64 + key_sizes = frozenset([64, 128, 192]) + + def __init__(self, key: bytes): + if len(key) == 8: + key += key + key + elif len(key) == 16: + key += key[:8] + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +# Not actually supported, marker for tests +class _DES: + key_size = 64 + + +class Blowfish(BlockCipherAlgorithm): + name = "Blowfish" + block_size = 64 + key_sizes = frozenset(range(32, 449, 8)) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class CAST5(BlockCipherAlgorithm): + name = "CAST5" + block_size = 64 + key_sizes = frozenset(range(40, 129, 8)) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class SEED(BlockCipherAlgorithm): + name = "SEED" + block_size = 128 + key_sizes = frozenset([128]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class IDEA(BlockCipherAlgorithm): + name = "IDEA" + block_size = 64 + key_sizes = frozenset([128]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +# This class only allows RC2 with a 128-bit key. No support for +# effective key bits or other key sizes is provided. +class RC2(BlockCipherAlgorithm): + name = "RC2" + block_size = 64 + key_sizes = frozenset([128]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__init__.py new file mode 100644 index 00000000..b5093362 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__init__.py @@ -0,0 +1,3 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
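The `decrepit/ciphers/algorithms.py` hunk above is where the legacy ciphers now live. Note the key-stretching in `TripleDES.__init__`: an 8-byte key is tripled and a 16-byte (two-key) key is extended to 24 bytes, so `key_size` reports 192 bits for both. A minimal sketch of driving one of these algorithms through the generic `Cipher` interface (the 16-byte plaintext is an arbitrary example):

import os
from cryptography.hazmat.decrepit.ciphers.algorithms import TripleDES
from cryptography.hazmat.primitives.ciphers import Cipher, modes

assert TripleDES(os.urandom(16)).key_size == 192  # two-key 3DES is expanded to 24 bytes

cipher = Cipher(TripleDES(os.urandom(24)), modes.CBC(os.urandom(8)))
encryptor = cipher.encryptor()
# CBC without padding needs input in multiples of the 8-byte block size.
ct = encryptor.update(b"sixteen byte msg") + encryptor.finalize()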
diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..ae5ea20b Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc new file mode 100644 index 00000000..aa584fac Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc new file mode 100644 index 00000000..53310861 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc new file mode 100644 index 00000000..b02ed22e Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc new file mode 100644 index 00000000..f2167bea Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc new file mode 100644 index 00000000..3984a1b6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc new file mode 100644 index 00000000..98cbd7fe Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc new file mode 100644 index 00000000..8657fb07 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc new file mode 100644 index 00000000..5af30b10 Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc new file mode 100644 index 00000000..19cb6456 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc new file mode 100644 index 00000000..bb4cb763 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_asymmetric.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_asymmetric.py new file mode 100644 index 00000000..ea55ffdf --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_asymmetric.py @@ -0,0 +1,19 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +# This exists to break an import cycle. It is normally accessible from the +# asymmetric padding module. + + +class AsymmetricPadding(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def name(self) -> str: + """ + A string naming this padding (e.g. "PSS", "PKCS1"). + """ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py new file mode 100644 index 00000000..305a9fd3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py @@ -0,0 +1,60 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography import utils + +# This exists to break an import cycle. It is normally accessible from the +# ciphers module. + + +class CipherAlgorithm(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def name(self) -> str: + """ + A string naming this mode (e.g. "AES", "Camellia"). + """ + + @property + @abc.abstractmethod + def key_sizes(self) -> frozenset[int]: + """ + Valid key sizes for this algorithm in bits + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The size of the key being used as an integer in bits (e.g. 128, 256). + """ + + +class BlockCipherAlgorithm(CipherAlgorithm): + key: utils.Buffer + + @property + @abc.abstractmethod + def block_size(self) -> int: + """ + The size of a block as an integer in bits (e.g. 64, 128). + """ + + +def _verify_key_size( + algorithm: CipherAlgorithm, key: utils.Buffer +) -> utils.Buffer: + # Verify that the key is instance of bytes + utils._check_byteslike("key", key) + + # Verify that the key size matches the expected key size + if len(key) * 8 not in algorithm.key_sizes: + raise ValueError( + f"Invalid key size ({len(key) * 8}) for {algorithm.name}." 
+ ) + return key diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_serialization.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_serialization.py new file mode 100644 index 00000000..e998865c --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_serialization.py @@ -0,0 +1,168 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography import utils +from cryptography.hazmat.primitives.hashes import HashAlgorithm + +# This exists to break an import cycle. These classes are normally accessible +# from the serialization module. + + +class PBES(utils.Enum): + PBESv1SHA1And3KeyTripleDESCBC = "PBESv1 using SHA1 and 3-Key TripleDES" + PBESv2SHA256AndAES256CBC = "PBESv2 using SHA256 PBKDF2 and AES256 CBC" + + +class Encoding(utils.Enum): + PEM = "PEM" + DER = "DER" + OpenSSH = "OpenSSH" + Raw = "Raw" + X962 = "ANSI X9.62" + SMIME = "S/MIME" + + +class PrivateFormat(utils.Enum): + PKCS8 = "PKCS8" + TraditionalOpenSSL = "TraditionalOpenSSL" + Raw = "Raw" + OpenSSH = "OpenSSH" + PKCS12 = "PKCS12" + + def encryption_builder(self) -> KeySerializationEncryptionBuilder: + if self not in (PrivateFormat.OpenSSH, PrivateFormat.PKCS12): + raise ValueError( + "encryption_builder only supported with PrivateFormat.OpenSSH" + " and PrivateFormat.PKCS12" + ) + return KeySerializationEncryptionBuilder(self) + + +class PublicFormat(utils.Enum): + SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1" + PKCS1 = "Raw PKCS#1" + OpenSSH = "OpenSSH" + Raw = "Raw" + CompressedPoint = "X9.62 Compressed Point" + UncompressedPoint = "X9.62 Uncompressed Point" + + +class ParameterFormat(utils.Enum): + PKCS3 = "PKCS3" + + +class KeySerializationEncryption(metaclass=abc.ABCMeta): + pass + + +class BestAvailableEncryption(KeySerializationEncryption): + def __init__(self, password: bytes): + if not isinstance(password, bytes) or len(password) == 0: + raise ValueError("Password must be 1 or more bytes.") + + self.password = password + + +class NoEncryption(KeySerializationEncryption): + pass + + +class KeySerializationEncryptionBuilder: + def __init__( + self, + format: PrivateFormat, + *, + _kdf_rounds: int | None = None, + _hmac_hash: HashAlgorithm | None = None, + _key_cert_algorithm: PBES | None = None, + ) -> None: + self._format = format + + self._kdf_rounds = _kdf_rounds + self._hmac_hash = _hmac_hash + self._key_cert_algorithm = _key_cert_algorithm + + def kdf_rounds(self, rounds: int) -> KeySerializationEncryptionBuilder: + if self._kdf_rounds is not None: + raise ValueError("kdf_rounds already set") + + if not isinstance(rounds, int): + raise TypeError("kdf_rounds must be an integer") + + if rounds < 1: + raise ValueError("kdf_rounds must be a positive integer") + + return KeySerializationEncryptionBuilder( + self._format, + _kdf_rounds=rounds, + _hmac_hash=self._hmac_hash, + _key_cert_algorithm=self._key_cert_algorithm, + ) + + def hmac_hash( + self, algorithm: HashAlgorithm + ) -> KeySerializationEncryptionBuilder: + if self._format is not PrivateFormat.PKCS12: + raise TypeError( + "hmac_hash only supported with PrivateFormat.PKCS12" + ) + + if self._hmac_hash is not None: + raise ValueError("hmac_hash already set") + return KeySerializationEncryptionBuilder( + self._format, + _kdf_rounds=self._kdf_rounds, + _hmac_hash=algorithm, + 
_key_cert_algorithm=self._key_cert_algorithm, + ) + + def key_cert_algorithm( + self, algorithm: PBES + ) -> KeySerializationEncryptionBuilder: + if self._format is not PrivateFormat.PKCS12: + raise TypeError( + "key_cert_algorithm only supported with PrivateFormat.PKCS12" + ) + if self._key_cert_algorithm is not None: + raise ValueError("key_cert_algorithm already set") + return KeySerializationEncryptionBuilder( + self._format, + _kdf_rounds=self._kdf_rounds, + _hmac_hash=self._hmac_hash, + _key_cert_algorithm=algorithm, + ) + + def build(self, password: bytes) -> KeySerializationEncryption: + if not isinstance(password, bytes) or len(password) == 0: + raise ValueError("Password must be 1 or more bytes.") + + return _KeySerializationEncryption( + self._format, + password, + kdf_rounds=self._kdf_rounds, + hmac_hash=self._hmac_hash, + key_cert_algorithm=self._key_cert_algorithm, + ) + + +class _KeySerializationEncryption(KeySerializationEncryption): + def __init__( + self, + format: PrivateFormat, + password: bytes, + *, + kdf_rounds: int | None, + hmac_hash: HashAlgorithm | None, + key_cert_algorithm: PBES | None, + ): + self._format = format + self.password = password + + self._kdf_rounds = kdf_rounds + self._hmac_hash = hmac_hash + self._key_cert_algorithm = key_cert_algorithm diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py new file mode 100644 index 00000000..b5093362 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py @@ -0,0 +1,3 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
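The `_serialization.py` hunk above carries the `KeySerializationEncryptionBuilder` behind `PrivateFormat.encryption_builder()`; as the code shows, `hmac_hash` and `key_cert_algorithm` are rejected unless the format is PKCS12. A minimal sketch of building a PKCS#12 encryption configuration via the public serialization API (the 50,000 KDF rounds and the password are arbitrary illustration values, not recommendations):

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.serialization import PrivateFormat, pkcs12

encryption = (
    PrivateFormat.PKCS12.encryption_builder()
    .kdf_rounds(50000)
    .key_cert_algorithm(pkcs12.PBES.PBESv2SHA256AndAES256CBC)
    .hmac_hash(hashes.SHA256())
    .build(b"password")
)

# The result is passed wherever a KeySerializationEncryption is accepted,
# e.g. pkcs12.serialize_key_and_certificates.
key = ec.generate_private_key(ec.SECP256R1())
blob = pkcs12.serialize_key_and_certificates(b"name", key, None, None, encryption)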
diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..5360d01a Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc new file mode 100644 index 00000000..c3f24aff Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc new file mode 100644 index 00000000..b7310d4f Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc new file mode 100644 index 00000000..5c4ea562 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc new file mode 100644 index 00000000..c131dcf8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc new file mode 100644 index 00000000..130e70d7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc new file mode 100644 index 00000000..5d9514b1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc new file mode 100644 index 00000000..a2243aa7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc new file mode 100644 index 00000000..3992bb72 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc new file mode 100644 index 00000000..4e1440dd Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc new file mode 100644 index 00000000..aa0afcfc Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc new file mode 100644 index 00000000..b72a818c Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py new file mode 100644 index 00000000..1822e99d --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py @@ -0,0 +1,147 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization + +generate_parameters = rust_openssl.dh.generate_parameters + + +DHPrivateNumbers = rust_openssl.dh.DHPrivateNumbers +DHPublicNumbers = rust_openssl.dh.DHPublicNumbers +DHParameterNumbers = rust_openssl.dh.DHParameterNumbers + + +class DHParameters(metaclass=abc.ABCMeta): + @abc.abstractmethod + def generate_private_key(self) -> DHPrivateKey: + """ + Generates and returns a DHPrivateKey. + """ + + @abc.abstractmethod + def parameter_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.ParameterFormat, + ) -> bytes: + """ + Returns the parameters serialized as bytes. + """ + + @abc.abstractmethod + def parameter_numbers(self) -> DHParameterNumbers: + """ + Returns a DHParameterNumbers. + """ + + +DHParametersWithSerialization = DHParameters +DHParameters.register(rust_openssl.dh.DHParameters) + + +class DHPublicKey(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def parameters(self) -> DHParameters: + """ + The DHParameters object associated with this public key. + """ + + @abc.abstractmethod + def public_numbers(self) -> DHPublicNumbers: + """ + Returns a DHPublicNumbers. 
+ """ + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __copy__(self) -> DHPublicKey: + """ + Returns a copy. + """ + + +DHPublicKeyWithSerialization = DHPublicKey +DHPublicKey.register(rust_openssl.dh.DHPublicKey) + + +class DHPrivateKey(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def public_key(self) -> DHPublicKey: + """ + The DHPublicKey associated with this private key. + """ + + @abc.abstractmethod + def parameters(self) -> DHParameters: + """ + The DHParameters object associated with this private key. + """ + + @abc.abstractmethod + def exchange(self, peer_public_key: DHPublicKey) -> bytes: + """ + Given peer's DHPublicKey, carry out the key exchange and + return shared key as bytes. + """ + + @abc.abstractmethod + def private_numbers(self) -> DHPrivateNumbers: + """ + Returns a DHPrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def __copy__(self) -> DHPrivateKey: + """ + Returns a copy. + """ + + +DHPrivateKeyWithSerialization = DHPrivateKey +DHPrivateKey.register(rust_openssl.dh.DHPrivateKey) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py new file mode 100644 index 00000000..21d78ba9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py @@ -0,0 +1,167 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc +import typing + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization, hashes +from cryptography.hazmat.primitives.asymmetric import utils as asym_utils +from cryptography.utils import Buffer + + +class DSAParameters(metaclass=abc.ABCMeta): + @abc.abstractmethod + def generate_private_key(self) -> DSAPrivateKey: + """ + Generates and returns a DSAPrivateKey. + """ + + @abc.abstractmethod + def parameter_numbers(self) -> DSAParameterNumbers: + """ + Returns a DSAParameterNumbers. + """ + + +DSAParametersWithNumbers = DSAParameters +DSAParameters.register(rust_openssl.dsa.DSAParameters) + + +class DSAPrivateKey(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def public_key(self) -> DSAPublicKey: + """ + The DSAPublicKey associated with this private key. + """ + + @abc.abstractmethod + def parameters(self) -> DSAParameters: + """ + The DSAParameters object associated with this private key. 
+ """ + + @abc.abstractmethod + def sign( + self, + data: Buffer, + algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, + ) -> bytes: + """ + Signs the data + """ + + @abc.abstractmethod + def private_numbers(self) -> DSAPrivateNumbers: + """ + Returns a DSAPrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def __copy__(self) -> DSAPrivateKey: + """ + Returns a copy. + """ + + +DSAPrivateKeyWithSerialization = DSAPrivateKey +DSAPrivateKey.register(rust_openssl.dsa.DSAPrivateKey) + + +class DSAPublicKey(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def parameters(self) -> DSAParameters: + """ + The DSAParameters object associated with this public key. + """ + + @abc.abstractmethod + def public_numbers(self) -> DSAPublicNumbers: + """ + Returns a DSAPublicNumbers. + """ + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def verify( + self, + signature: Buffer, + data: Buffer, + algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, + ) -> None: + """ + Verifies the signature of the data. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __copy__(self) -> DSAPublicKey: + """ + Returns a copy. + """ + + +DSAPublicKeyWithSerialization = DSAPublicKey +DSAPublicKey.register(rust_openssl.dsa.DSAPublicKey) + +DSAPrivateNumbers = rust_openssl.dsa.DSAPrivateNumbers +DSAPublicNumbers = rust_openssl.dsa.DSAPublicNumbers +DSAParameterNumbers = rust_openssl.dsa.DSAParameterNumbers + + +def generate_parameters( + key_size: int, backend: typing.Any = None +) -> DSAParameters: + if key_size not in (1024, 2048, 3072, 4096): + raise ValueError("Key size must be 1024, 2048, 3072, or 4096 bits.") + + return rust_openssl.dsa.generate_parameters(key_size) + + +def generate_private_key( + key_size: int, backend: typing.Any = None +) -> DSAPrivateKey: + parameters = generate_parameters(key_size) + return parameters.generate_private_key() diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py new file mode 100644 index 00000000..a13d9827 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py @@ -0,0 +1,447 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +import abc +import typing + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat._oid import ObjectIdentifier +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization, hashes +from cryptography.hazmat.primitives.asymmetric import utils as asym_utils + + +class EllipticCurveOID: + SECP192R1 = ObjectIdentifier("1.2.840.10045.3.1.1") + SECP224R1 = ObjectIdentifier("1.3.132.0.33") + SECP256K1 = ObjectIdentifier("1.3.132.0.10") + SECP256R1 = ObjectIdentifier("1.2.840.10045.3.1.7") + SECP384R1 = ObjectIdentifier("1.3.132.0.34") + SECP521R1 = ObjectIdentifier("1.3.132.0.35") + BRAINPOOLP256R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.7") + BRAINPOOLP384R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.11") + BRAINPOOLP512R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.13") + SECT163K1 = ObjectIdentifier("1.3.132.0.1") + SECT163R2 = ObjectIdentifier("1.3.132.0.15") + SECT233K1 = ObjectIdentifier("1.3.132.0.26") + SECT233R1 = ObjectIdentifier("1.3.132.0.27") + SECT283K1 = ObjectIdentifier("1.3.132.0.16") + SECT283R1 = ObjectIdentifier("1.3.132.0.17") + SECT409K1 = ObjectIdentifier("1.3.132.0.36") + SECT409R1 = ObjectIdentifier("1.3.132.0.37") + SECT571K1 = ObjectIdentifier("1.3.132.0.38") + SECT571R1 = ObjectIdentifier("1.3.132.0.39") + + +class EllipticCurve(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def name(self) -> str: + """ + The name of the curve. e.g. secp256r1. + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + Bit size of a secret scalar for the curve. + """ + + @property + @abc.abstractmethod + def group_order(self) -> int: + """ + The order of the curve's group. + """ + + +class EllipticCurveSignatureAlgorithm(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def algorithm( + self, + ) -> asym_utils.Prehashed | hashes.HashAlgorithm: + """ + The digest algorithm used with this signature. + """ + + +class EllipticCurvePrivateKey(metaclass=abc.ABCMeta): + @abc.abstractmethod + def exchange( + self, algorithm: ECDH, peer_public_key: EllipticCurvePublicKey + ) -> bytes: + """ + Performs a key exchange operation using the provided algorithm with the + provided peer's public key. + """ + + @abc.abstractmethod + def public_key(self) -> EllipticCurvePublicKey: + """ + The EllipticCurvePublicKey for this private key. + """ + + @property + @abc.abstractmethod + def curve(self) -> EllipticCurve: + """ + The EllipticCurve that this key is on. + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + Bit size of a secret scalar for the curve. + """ + + @abc.abstractmethod + def sign( + self, + data: utils.Buffer, + signature_algorithm: EllipticCurveSignatureAlgorithm, + ) -> bytes: + """ + Signs the data + """ + + @abc.abstractmethod + def private_numbers(self) -> EllipticCurvePrivateNumbers: + """ + Returns an EllipticCurvePrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def __copy__(self) -> EllipticCurvePrivateKey: + """ + Returns a copy. 
+ """ + + +EllipticCurvePrivateKeyWithSerialization = EllipticCurvePrivateKey +EllipticCurvePrivateKey.register(rust_openssl.ec.ECPrivateKey) + + +class EllipticCurvePublicKey(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def curve(self) -> EllipticCurve: + """ + The EllipticCurve that this key is on. + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + Bit size of a secret scalar for the curve. + """ + + @abc.abstractmethod + def public_numbers(self) -> EllipticCurvePublicNumbers: + """ + Returns an EllipticCurvePublicNumbers. + """ + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def verify( + self, + signature: utils.Buffer, + data: utils.Buffer, + signature_algorithm: EllipticCurveSignatureAlgorithm, + ) -> None: + """ + Verifies the signature of the data. + """ + + @classmethod + def from_encoded_point( + cls, curve: EllipticCurve, data: bytes + ) -> EllipticCurvePublicKey: + utils._check_bytes("data", data) + + if len(data) == 0: + raise ValueError("data must not be an empty byte string") + + if data[0] not in [0x02, 0x03, 0x04]: + raise ValueError("Unsupported elliptic curve point type") + + return rust_openssl.ec.from_public_bytes(curve, data) + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __copy__(self) -> EllipticCurvePublicKey: + """ + Returns a copy. + """ + + +EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey +EllipticCurvePublicKey.register(rust_openssl.ec.ECPublicKey) + +EllipticCurvePrivateNumbers = rust_openssl.ec.EllipticCurvePrivateNumbers +EllipticCurvePublicNumbers = rust_openssl.ec.EllipticCurvePublicNumbers + + +class SECT571R1(EllipticCurve): + name = "sect571r1" + key_size = 570 + group_order = 0x3FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE661CE18FF55987308059B186823851EC7DD9CA1161DE93D5174D66E8382E9BB2FE84E47 # noqa: E501 + + +class SECT409R1(EllipticCurve): + name = "sect409r1" + key_size = 409 + group_order = 0x10000000000000000000000000000000000000000000000000001E2AAD6A612F33307BE5FA47C3C9E052F838164CD37D9A21173 # noqa: E501 + + +class SECT283R1(EllipticCurve): + name = "sect283r1" + key_size = 283 + group_order = 0x3FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEF90399660FC938A90165B042A7CEFADB307 # noqa: E501 + + +class SECT233R1(EllipticCurve): + name = "sect233r1" + key_size = 233 + group_order = 0x1000000000000000000000000000013E974E72F8A6922031D2603CFE0D7 + + +class SECT163R2(EllipticCurve): + name = "sect163r2" + key_size = 163 + group_order = 0x40000000000000000000292FE77E70C12A4234C33 + + +class SECT571K1(EllipticCurve): + name = "sect571k1" + key_size = 571 + group_order = 0x20000000000000000000000000000000000000000000000000000000000000000000000131850E1F19A63E4B391A8DB917F4138B630D84BE5D639381E91DEB45CFE778F637C1001 # noqa: E501 + + +class SECT409K1(EllipticCurve): + name = "sect409k1" + key_size = 409 + group_order = 0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE5F83B2D4EA20400EC4557D5ED3E3E7CA5B4B5C83B8E01E5FCF # noqa: E501 + + +class SECT283K1(EllipticCurve): + name = "sect283k1" + key_size = 283 + group_order = 0x1FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE9AE2ED07577265DFF7F94451E061E163C61 # noqa: E501 + + +class SECT233K1(EllipticCurve): + name = "sect233k1" + key_size = 233 + group_order = 
0x8000000000000000000000000000069D5BB915BCD46EFB1AD5F173ABDF + + +class SECT163K1(EllipticCurve): + name = "sect163k1" + key_size = 163 + group_order = 0x4000000000000000000020108A2E0CC0D99F8A5EF + + +class SECP521R1(EllipticCurve): + name = "secp521r1" + key_size = 521 + group_order = 0x1FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFA51868783BF2F966B7FCC0148F709A5D03BB5C9B8899C47AEBB6FB71E91386409 # noqa: E501 + + +class SECP384R1(EllipticCurve): + name = "secp384r1" + key_size = 384 + group_order = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFC7634D81F4372DDF581A0DB248B0A77AECEC196ACCC52973 # noqa: E501 + + +class SECP256R1(EllipticCurve): + name = "secp256r1" + key_size = 256 + group_order = ( + 0xFFFFFFFF00000000FFFFFFFFFFFFFFFFBCE6FAADA7179E84F3B9CAC2FC632551 + ) + + +class SECP256K1(EllipticCurve): + name = "secp256k1" + key_size = 256 + group_order = ( + 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141 + ) + + +class SECP224R1(EllipticCurve): + name = "secp224r1" + key_size = 224 + group_order = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFF16A2E0B8F03E13DD29455C5C2A3D + + +class SECP192R1(EllipticCurve): + name = "secp192r1" + key_size = 192 + group_order = 0xFFFFFFFFFFFFFFFFFFFFFFFF99DEF836146BC9B1B4D22831 + + +class BrainpoolP256R1(EllipticCurve): + name = "brainpoolP256r1" + key_size = 256 + group_order = ( + 0xA9FB57DBA1EEA9BC3E660A909D838D718C397AA3B561A6F7901E0E82974856A7 + ) + + +class BrainpoolP384R1(EllipticCurve): + name = "brainpoolP384r1" + key_size = 384 + group_order = 0x8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B31F166E6CAC0425A7CF3AB6AF6B7FC3103B883202E9046565 # noqa: E501 + + +class BrainpoolP512R1(EllipticCurve): + name = "brainpoolP512r1" + key_size = 512 + group_order = 0xAADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA70330870553E5C414CA92619418661197FAC10471DB1D381085DDADDB58796829CA90069 # noqa: E501 + + +_CURVE_TYPES: dict[str, EllipticCurve] = { + "prime192v1": SECP192R1(), + "prime256v1": SECP256R1(), + "secp192r1": SECP192R1(), + "secp224r1": SECP224R1(), + "secp256r1": SECP256R1(), + "secp384r1": SECP384R1(), + "secp521r1": SECP521R1(), + "secp256k1": SECP256K1(), + "sect163k1": SECT163K1(), + "sect233k1": SECT233K1(), + "sect283k1": SECT283K1(), + "sect409k1": SECT409K1(), + "sect571k1": SECT571K1(), + "sect163r2": SECT163R2(), + "sect233r1": SECT233R1(), + "sect283r1": SECT283R1(), + "sect409r1": SECT409R1(), + "sect571r1": SECT571R1(), + "brainpoolP256r1": BrainpoolP256R1(), + "brainpoolP384r1": BrainpoolP384R1(), + "brainpoolP512r1": BrainpoolP512R1(), +} + + +class ECDSA(EllipticCurveSignatureAlgorithm): + def __init__( + self, + algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, + deterministic_signing: bool = False, + ): + from cryptography.hazmat.backends.openssl.backend import backend + + if ( + deterministic_signing + and not backend.ecdsa_deterministic_supported() + ): + raise UnsupportedAlgorithm( + "ECDSA with deterministic signature (RFC 6979) is not " + "supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + self._algorithm = algorithm + self._deterministic_signing = deterministic_signing + + @property + def algorithm( + self, + ) -> asym_utils.Prehashed | hashes.HashAlgorithm: + return self._algorithm + + @property + def deterministic_signing( + self, + ) -> bool: + return self._deterministic_signing + + +generate_private_key = rust_openssl.ec.generate_private_key + + +def derive_private_key( + private_value: int, + curve: EllipticCurve, + backend: typing.Any = 
None, +) -> EllipticCurvePrivateKey: + if not isinstance(private_value, int): + raise TypeError("private_value must be an integer type.") + + if private_value <= 0: + raise ValueError("private_value must be a positive integer.") + + return rust_openssl.ec.derive_private_key(private_value, curve) + + +class ECDH: + pass + + +_OID_TO_CURVE = { + EllipticCurveOID.SECP192R1: SECP192R1, + EllipticCurveOID.SECP224R1: SECP224R1, + EllipticCurveOID.SECP256K1: SECP256K1, + EllipticCurveOID.SECP256R1: SECP256R1, + EllipticCurveOID.SECP384R1: SECP384R1, + EllipticCurveOID.SECP521R1: SECP521R1, + EllipticCurveOID.BRAINPOOLP256R1: BrainpoolP256R1, + EllipticCurveOID.BRAINPOOLP384R1: BrainpoolP384R1, + EllipticCurveOID.BRAINPOOLP512R1: BrainpoolP512R1, + EllipticCurveOID.SECT163K1: SECT163K1, + EllipticCurveOID.SECT163R2: SECT163R2, + EllipticCurveOID.SECT233K1: SECT233K1, + EllipticCurveOID.SECT233R1: SECT233R1, + EllipticCurveOID.SECT283K1: SECT283K1, + EllipticCurveOID.SECT283R1: SECT283R1, + EllipticCurveOID.SECT409K1: SECT409K1, + EllipticCurveOID.SECT409R1: SECT409R1, + EllipticCurveOID.SECT571K1: SECT571K1, + EllipticCurveOID.SECT571R1: SECT571R1, +} + + +def get_curve_for_oid(oid: ObjectIdentifier) -> type[EllipticCurve]: + try: + return _OID_TO_CURVE[oid] + except KeyError: + raise LookupError( + "The provided object identifier has no matching elliptic " + "curve class" + ) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py new file mode 100644 index 00000000..e576dc9f --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py @@ -0,0 +1,129 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization +from cryptography.utils import Buffer + + +class Ed25519PublicKey(metaclass=abc.ABCMeta): + @classmethod + def from_public_bytes(cls, data: bytes) -> Ed25519PublicKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed25519_supported(): + raise UnsupportedAlgorithm( + "ed25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed25519.from_public_bytes(data) + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def public_bytes_raw(self) -> bytes: + """ + The raw bytes of the public key. + Equivalent to public_bytes(Raw, Raw). + """ + + @abc.abstractmethod + def verify(self, signature: Buffer, data: Buffer) -> None: + """ + Verify the signature. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __copy__(self) -> Ed25519PublicKey: + """ + Returns a copy. 
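# --- usage sketch (not part of the vendored ec.py above; assumes the standard
# cryptography public API) ---
# ECDSA sign/verify plus ECDH: the empty ECDH class defined above is only a
# marker passed to exchange(); the result is the raw shared secret, which
# should normally be run through a KDF before use.
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec

ec_key = ec.generate_private_key(ec.SECP256R1())
sig = ec_key.sign(b"message", ec.ECDSA(hashes.SHA256()))
ec_key.public_key().verify(sig, b"message", ec.ECDSA(hashes.SHA256()))

peer = ec.generate_private_key(ec.SECP256R1())
shared_secret = ec_key.exchange(ec.ECDH(), peer.public_key())
# --- end sketch ---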
+ """ + + +Ed25519PublicKey.register(rust_openssl.ed25519.Ed25519PublicKey) + + +class Ed25519PrivateKey(metaclass=abc.ABCMeta): + @classmethod + def generate(cls) -> Ed25519PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed25519_supported(): + raise UnsupportedAlgorithm( + "ed25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed25519.generate_key() + + @classmethod + def from_private_bytes(cls, data: Buffer) -> Ed25519PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed25519_supported(): + raise UnsupportedAlgorithm( + "ed25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed25519.from_private_bytes(data) + + @abc.abstractmethod + def public_key(self) -> Ed25519PublicKey: + """ + The Ed25519PublicKey derived from the private key. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def private_bytes_raw(self) -> bytes: + """ + The raw bytes of the private key. + Equivalent to private_bytes(Raw, Raw, NoEncryption()). + """ + + @abc.abstractmethod + def sign(self, data: Buffer) -> bytes: + """ + Signs the data. + """ + + @abc.abstractmethod + def __copy__(self) -> Ed25519PrivateKey: + """ + Returns a copy. + """ + + +Ed25519PrivateKey.register(rust_openssl.ed25519.Ed25519PrivateKey) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py new file mode 100644 index 00000000..89db2098 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py @@ -0,0 +1,131 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization +from cryptography.utils import Buffer + + +class Ed448PublicKey(metaclass=abc.ABCMeta): + @classmethod + def from_public_bytes(cls, data: bytes) -> Ed448PublicKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed448_supported(): + raise UnsupportedAlgorithm( + "ed448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed448.from_public_bytes(data) + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def public_bytes_raw(self) -> bytes: + """ + The raw bytes of the public key. + Equivalent to public_bytes(Raw, Raw). + """ + + @abc.abstractmethod + def verify(self, signature: Buffer, data: Buffer) -> None: + """ + Verify the signature. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. 
+ """ + + @abc.abstractmethod + def __copy__(self) -> Ed448PublicKey: + """ + Returns a copy. + """ + + +if hasattr(rust_openssl, "ed448"): + Ed448PublicKey.register(rust_openssl.ed448.Ed448PublicKey) + + +class Ed448PrivateKey(metaclass=abc.ABCMeta): + @classmethod + def generate(cls) -> Ed448PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed448_supported(): + raise UnsupportedAlgorithm( + "ed448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed448.generate_key() + + @classmethod + def from_private_bytes(cls, data: Buffer) -> Ed448PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed448_supported(): + raise UnsupportedAlgorithm( + "ed448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed448.from_private_bytes(data) + + @abc.abstractmethod + def public_key(self) -> Ed448PublicKey: + """ + The Ed448PublicKey derived from the private key. + """ + + @abc.abstractmethod + def sign(self, data: Buffer) -> bytes: + """ + Signs the data. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def private_bytes_raw(self) -> bytes: + """ + The raw bytes of the private key. + Equivalent to private_bytes(Raw, Raw, NoEncryption()). + """ + + @abc.abstractmethod + def __copy__(self) -> Ed448PrivateKey: + """ + Returns a copy. + """ + + +if hasattr(rust_openssl, "x448"): + Ed448PrivateKey.register(rust_openssl.ed448.Ed448PrivateKey) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py new file mode 100644 index 00000000..5121a288 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py @@ -0,0 +1,111 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives._asymmetric import ( + AsymmetricPadding as AsymmetricPadding, +) +from cryptography.hazmat.primitives.asymmetric import rsa + + +class PKCS1v15(AsymmetricPadding): + name = "EMSA-PKCS1-v1_5" + + +class _MaxLength: + "Sentinel value for `MAX_LENGTH`." + + +class _Auto: + "Sentinel value for `AUTO`." + + +class _DigestLength: + "Sentinel value for `DIGEST_LENGTH`." 
+ + +class PSS(AsymmetricPadding): + MAX_LENGTH = _MaxLength() + AUTO = _Auto() + DIGEST_LENGTH = _DigestLength() + name = "EMSA-PSS" + _salt_length: int | _MaxLength | _Auto | _DigestLength + + def __init__( + self, + mgf: MGF, + salt_length: int | _MaxLength | _Auto | _DigestLength, + ) -> None: + self._mgf = mgf + + if not isinstance( + salt_length, (int, _MaxLength, _Auto, _DigestLength) + ): + raise TypeError( + "salt_length must be an integer, MAX_LENGTH, " + "DIGEST_LENGTH, or AUTO" + ) + + if isinstance(salt_length, int) and salt_length < 0: + raise ValueError("salt_length must be zero or greater.") + + self._salt_length = salt_length + + @property + def mgf(self) -> MGF: + return self._mgf + + +class OAEP(AsymmetricPadding): + name = "EME-OAEP" + + def __init__( + self, + mgf: MGF, + algorithm: hashes.HashAlgorithm, + label: bytes | None, + ): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + + self._mgf = mgf + self._algorithm = algorithm + self._label = label + + @property + def algorithm(self) -> hashes.HashAlgorithm: + return self._algorithm + + @property + def mgf(self) -> MGF: + return self._mgf + + +class MGF(metaclass=abc.ABCMeta): + _algorithm: hashes.HashAlgorithm + + +class MGF1(MGF): + def __init__(self, algorithm: hashes.HashAlgorithm): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + + self._algorithm = algorithm + + +def calculate_max_pss_salt_length( + key: rsa.RSAPrivateKey | rsa.RSAPublicKey, + hash_algorithm: hashes.HashAlgorithm, +) -> int: + if not isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)): + raise TypeError("key must be an RSA public or private key") + # bit length - 1 per RFC 3447 + emlen = (key.key_size + 6) // 8 + salt_length = emlen - hash_algorithm.digest_size - 2 + assert salt_length >= 0 + return salt_length diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py new file mode 100644 index 00000000..f94812e1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py @@ -0,0 +1,285 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc +import random +import typing +from math import gcd + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization, hashes +from cryptography.hazmat.primitives._asymmetric import AsymmetricPadding +from cryptography.hazmat.primitives.asymmetric import utils as asym_utils + + +class RSAPrivateKey(metaclass=abc.ABCMeta): + @abc.abstractmethod + def decrypt(self, ciphertext: bytes, padding: AsymmetricPadding) -> bytes: + """ + Decrypts the provided ciphertext. + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the public modulus. + """ + + @abc.abstractmethod + def public_key(self) -> RSAPublicKey: + """ + The RSAPublicKey associated with this private key. + """ + + @abc.abstractmethod + def sign( + self, + data: bytes, + padding: AsymmetricPadding, + algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, + ) -> bytes: + """ + Signs the data. 
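# --- usage sketch (illustration only; ties the PSS/MGF1 classes above to RSA
# signing via the standard cryptography public API) ---
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa

rsa_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
pss = padding.PSS(
    mgf=padding.MGF1(hashes.SHA256()),
    salt_length=padding.PSS.MAX_LENGTH,  # the _MaxLength sentinel above
)
sig = rsa_key.sign(b"message", pss, hashes.SHA256())
rsa_key.public_key().verify(sig, b"message", pss, hashes.SHA256())
# --- end sketch ---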
+ """ + + @abc.abstractmethod + def private_numbers(self) -> RSAPrivateNumbers: + """ + Returns an RSAPrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def __copy__(self) -> RSAPrivateKey: + """ + Returns a copy. + """ + + +RSAPrivateKeyWithSerialization = RSAPrivateKey +RSAPrivateKey.register(rust_openssl.rsa.RSAPrivateKey) + + +class RSAPublicKey(metaclass=abc.ABCMeta): + @abc.abstractmethod + def encrypt(self, plaintext: bytes, padding: AsymmetricPadding) -> bytes: + """ + Encrypts the given plaintext. + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the public modulus. + """ + + @abc.abstractmethod + def public_numbers(self) -> RSAPublicNumbers: + """ + Returns an RSAPublicNumbers + """ + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def verify( + self, + signature: bytes, + data: bytes, + padding: AsymmetricPadding, + algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, + ) -> None: + """ + Verifies the signature of the data. + """ + + @abc.abstractmethod + def recover_data_from_signature( + self, + signature: bytes, + padding: AsymmetricPadding, + algorithm: hashes.HashAlgorithm | None, + ) -> bytes: + """ + Recovers the original data from the signature. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __copy__(self) -> RSAPublicKey: + """ + Returns a copy. + """ + + +RSAPublicKeyWithSerialization = RSAPublicKey +RSAPublicKey.register(rust_openssl.rsa.RSAPublicKey) + +RSAPrivateNumbers = rust_openssl.rsa.RSAPrivateNumbers +RSAPublicNumbers = rust_openssl.rsa.RSAPublicNumbers + + +def generate_private_key( + public_exponent: int, + key_size: int, + backend: typing.Any = None, +) -> RSAPrivateKey: + _verify_rsa_parameters(public_exponent, key_size) + return rust_openssl.rsa.generate_private_key(public_exponent, key_size) + + +def _verify_rsa_parameters(public_exponent: int, key_size: int) -> None: + if public_exponent not in (3, 65537): + raise ValueError( + "public_exponent must be either 3 (for legacy compatibility) or " + "65537. Almost everyone should choose 65537 here!" + ) + + if key_size < 1024: + raise ValueError("key_size must be at least 1024-bits.") + + +def _modinv(e: int, m: int) -> int: + """ + Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1 + """ + x1, x2 = 1, 0 + a, b = e, m + while b > 0: + q, r = divmod(a, b) + xn = x1 - q * x2 + a, b, x1, x2 = b, r, x2, xn + return x1 % m + + +def rsa_crt_iqmp(p: int, q: int) -> int: + """ + Compute the CRT (q ** -1) % p value from RSA primes p and q. + """ + if p <= 1 or q <= 1: + raise ValueError("Values can't be <= 1") + return _modinv(q, p) + + +def rsa_crt_dmp1(private_exponent: int, p: int) -> int: + """ + Compute the CRT private_exponent % (p - 1) value from the RSA + private_exponent (d) and p. 
+ """ + if private_exponent <= 1 or p <= 1: + raise ValueError("Values can't be <= 1") + return private_exponent % (p - 1) + + +def rsa_crt_dmq1(private_exponent: int, q: int) -> int: + """ + Compute the CRT private_exponent % (q - 1) value from the RSA + private_exponent (d) and q. + """ + if private_exponent <= 1 or q <= 1: + raise ValueError("Values can't be <= 1") + return private_exponent % (q - 1) + + +def rsa_recover_private_exponent(e: int, p: int, q: int) -> int: + """ + Compute the RSA private_exponent (d) given the public exponent (e) + and the RSA primes p and q. + + This uses the Carmichael totient function to generate the + smallest possible working value of the private exponent. + """ + # This lambda_n is the Carmichael totient function. + # The original RSA paper uses the Euler totient function + # here: phi_n = (p - 1) * (q - 1) + # Either version of the private exponent will work, but the + # one generated by the older formulation may be larger + # than necessary. (lambda_n always divides phi_n) + # + # TODO: Replace with lcm(p - 1, q - 1) once the minimum + # supported Python version is >= 3.9. + if e <= 1 or p <= 1 or q <= 1: + raise ValueError("Values can't be <= 1") + lambda_n = (p - 1) * (q - 1) // gcd(p - 1, q - 1) + return _modinv(e, lambda_n) + + +# Controls the number of iterations rsa_recover_prime_factors will perform +# to obtain the prime factors. +_MAX_RECOVERY_ATTEMPTS = 500 + + +def rsa_recover_prime_factors(n: int, e: int, d: int) -> tuple[int, int]: + """ + Compute factors p and q from the private exponent d. We assume that n has + no more than two factors. This function is adapted from code in PyCrypto. + """ + # reject invalid values early + if d <= 1 or e <= 1: + raise ValueError("d, e can't be <= 1") + if 17 != pow(17, e * d, n): + raise ValueError("n, d, e don't match") + # See 8.2.2(i) in Handbook of Applied Cryptography. + ktot = d * e - 1 + # The quantity d*e-1 is a multiple of phi(n), even, + # and can be represented as t*2^s. + t = ktot + while t % 2 == 0: + t = t // 2 + # Cycle through all multiplicative inverses in Zn. + # The algorithm is non-deterministic, but there is a 50% chance + # any candidate a leads to successful factoring. + # See "Digitalized Signatures and Public Key Functions as Intractable + # as Factorization", M. Rabin, 1979 + spotted = False + tries = 0 + while not spotted and tries < _MAX_RECOVERY_ATTEMPTS: + a = random.randint(2, n - 1) + tries += 1 + k = t + # Cycle through all values a^{t*2^i}=a^k + while k < ktot: + cand = pow(a, k, n) + # Check if a^k is a non-trivial root of unity (mod n) + if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1: + # We have found a number such that (cand-1)(cand+1)=0 (mod n). + # Either of the terms divides n. + p = gcd(cand + 1, n) + spotted = True + break + k *= 2 + if not spotted: + raise ValueError("Unable to compute factors p and q from exponent d.") + # Found ! + q, r = divmod(n, p) + assert r == 0 + p, q = sorted((p, q), reverse=True) + return (p, q) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/types.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/types.py new file mode 100644 index 00000000..1fe4eaf5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/types.py @@ -0,0 +1,111 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import typing + +from cryptography import utils +from cryptography.hazmat.primitives.asymmetric import ( + dh, + dsa, + ec, + ed448, + ed25519, + rsa, + x448, + x25519, +) + +# Every asymmetric key type +PublicKeyTypes = typing.Union[ + dh.DHPublicKey, + dsa.DSAPublicKey, + rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, + x25519.X25519PublicKey, + x448.X448PublicKey, +] +PUBLIC_KEY_TYPES = PublicKeyTypes +utils.deprecated( + PUBLIC_KEY_TYPES, + __name__, + "Use PublicKeyTypes instead", + utils.DeprecatedIn40, + name="PUBLIC_KEY_TYPES", +) +# Every asymmetric key type +PrivateKeyTypes = typing.Union[ + dh.DHPrivateKey, + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, + x25519.X25519PrivateKey, + x448.X448PrivateKey, +] +PRIVATE_KEY_TYPES = PrivateKeyTypes +utils.deprecated( + PRIVATE_KEY_TYPES, + __name__, + "Use PrivateKeyTypes instead", + utils.DeprecatedIn40, + name="PRIVATE_KEY_TYPES", +) +# Just the key types we allow to be used for x509 signing. This mirrors +# the certificate public key types +CertificateIssuerPrivateKeyTypes = typing.Union[ + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, +] +CERTIFICATE_PRIVATE_KEY_TYPES = CertificateIssuerPrivateKeyTypes +utils.deprecated( + CERTIFICATE_PRIVATE_KEY_TYPES, + __name__, + "Use CertificateIssuerPrivateKeyTypes instead", + utils.DeprecatedIn40, + name="CERTIFICATE_PRIVATE_KEY_TYPES", +) +# Just the key types we allow to be used for x509 signing. This mirrors +# the certificate private key types +CertificateIssuerPublicKeyTypes = typing.Union[ + dsa.DSAPublicKey, + rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, +] +CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES = CertificateIssuerPublicKeyTypes +utils.deprecated( + CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES, + __name__, + "Use CertificateIssuerPublicKeyTypes instead", + utils.DeprecatedIn40, + name="CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES", +) +# This type removes DHPublicKey. x448/x25519 can be a public key +# but cannot be used in signing so they are allowed here. +CertificatePublicKeyTypes = typing.Union[ + dsa.DSAPublicKey, + rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, + x25519.X25519PublicKey, + x448.X448PublicKey, +] +CERTIFICATE_PUBLIC_KEY_TYPES = CertificatePublicKeyTypes +utils.deprecated( + CERTIFICATE_PUBLIC_KEY_TYPES, + __name__, + "Use CertificatePublicKeyTypes instead", + utils.DeprecatedIn40, + name="CERTIFICATE_PUBLIC_KEY_TYPES", +) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py new file mode 100644 index 00000000..826b9567 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py @@ -0,0 +1,24 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
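# --- usage sketch (the helper below is hypothetical, written here only to
# show what the type-alias unions above are for) ---
# The unions annotate code that accepts any key type; note that the Ed25519,
# Ed448, X25519 and X448 keys expose no key_size attribute, hence getattr.
from cryptography.hazmat.primitives.asymmetric import types

def describe_key(key: types.PublicKeyTypes) -> str:
    bits = getattr(key, "key_size", None)
    return type(key).__name__ + (f" ({bits} bits)" if bits else "")
# --- end sketch ---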
+ +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import asn1 +from cryptography.hazmat.primitives import hashes + +decode_dss_signature = asn1.decode_dss_signature +encode_dss_signature = asn1.encode_dss_signature + + +class Prehashed: + def __init__(self, algorithm: hashes.HashAlgorithm): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of HashAlgorithm.") + + self._algorithm = algorithm + self._digest_size = algorithm.digest_size + + @property + def digest_size(self) -> int: + return self._digest_size diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py new file mode 100644 index 00000000..a4993766 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py @@ -0,0 +1,122 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization +from cryptography.utils import Buffer + + +class X25519PublicKey(metaclass=abc.ABCMeta): + @classmethod + def from_public_bytes(cls, data: bytes) -> X25519PublicKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x25519_supported(): + raise UnsupportedAlgorithm( + "X25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return rust_openssl.x25519.from_public_bytes(data) + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def public_bytes_raw(self) -> bytes: + """ + The raw bytes of the public key. + Equivalent to public_bytes(Raw, Raw). + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __copy__(self) -> X25519PublicKey: + """ + Returns a copy. 
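# --- usage sketch (illustration only; assumes the standard cryptography
# public API) ---
# Prehashed lets a caller sign a digest computed elsewhere; the digest length
# must match the declared algorithm. decode/encode_dss_signature round-trip
# the DER-encoded (r, s) pair.
import hashlib
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.asymmetric import utils as asym_utils

signer = ec.generate_private_key(ec.SECP256R1())
digest = hashlib.sha256(b"a very large message").digest()
sig = signer.sign(digest, ec.ECDSA(asym_utils.Prehashed(hashes.SHA256())))

r, s = asym_utils.decode_dss_signature(sig)
assert asym_utils.encode_dss_signature(r, s) == sig
# --- end sketch ---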
+ """ + + +X25519PublicKey.register(rust_openssl.x25519.X25519PublicKey) + + +class X25519PrivateKey(metaclass=abc.ABCMeta): + @classmethod + def generate(cls) -> X25519PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x25519_supported(): + raise UnsupportedAlgorithm( + "X25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + return rust_openssl.x25519.generate_key() + + @classmethod + def from_private_bytes(cls, data: Buffer) -> X25519PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x25519_supported(): + raise UnsupportedAlgorithm( + "X25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return rust_openssl.x25519.from_private_bytes(data) + + @abc.abstractmethod + def public_key(self) -> X25519PublicKey: + """ + Returns the public key associated with this private key + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def private_bytes_raw(self) -> bytes: + """ + The raw bytes of the private key. + Equivalent to private_bytes(Raw, Raw, NoEncryption()). + """ + + @abc.abstractmethod + def exchange(self, peer_public_key: X25519PublicKey) -> bytes: + """ + Performs a key exchange operation using the provided peer's public key. + """ + + @abc.abstractmethod + def __copy__(self) -> X25519PrivateKey: + """ + Returns a copy. + """ + + +X25519PrivateKey.register(rust_openssl.x25519.X25519PrivateKey) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py new file mode 100644 index 00000000..c6fd71ba --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py @@ -0,0 +1,125 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization +from cryptography.utils import Buffer + + +class X448PublicKey(metaclass=abc.ABCMeta): + @classmethod + def from_public_bytes(cls, data: bytes) -> X448PublicKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x448_supported(): + raise UnsupportedAlgorithm( + "X448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return rust_openssl.x448.from_public_bytes(data) + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def public_bytes_raw(self) -> bytes: + """ + The raw bytes of the public key. + Equivalent to public_bytes(Raw, Raw). + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __copy__(self) -> X448PublicKey: + """ + Returns a copy. 
+ """ + + +if hasattr(rust_openssl, "x448"): + X448PublicKey.register(rust_openssl.x448.X448PublicKey) + + +class X448PrivateKey(metaclass=abc.ABCMeta): + @classmethod + def generate(cls) -> X448PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x448_supported(): + raise UnsupportedAlgorithm( + "X448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return rust_openssl.x448.generate_key() + + @classmethod + def from_private_bytes(cls, data: Buffer) -> X448PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x448_supported(): + raise UnsupportedAlgorithm( + "X448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return rust_openssl.x448.from_private_bytes(data) + + @abc.abstractmethod + def public_key(self) -> X448PublicKey: + """ + Returns the public key associated with this private key + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def private_bytes_raw(self) -> bytes: + """ + The raw bytes of the private key. + Equivalent to private_bytes(Raw, Raw, NoEncryption()). + """ + + @abc.abstractmethod + def exchange(self, peer_public_key: X448PublicKey) -> bytes: + """ + Performs a key exchange operation using the provided peer's public key. + """ + + @abc.abstractmethod + def __copy__(self) -> X448PrivateKey: + """ + Returns a copy. + """ + + +if hasattr(rust_openssl, "x448"): + X448PrivateKey.register(rust_openssl.x448.X448PrivateKey) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py new file mode 100644 index 00000000..10c15d0f --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py @@ -0,0 +1,27 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +from cryptography.hazmat.primitives._cipheralgorithm import ( + BlockCipherAlgorithm, + CipherAlgorithm, +) +from cryptography.hazmat.primitives.ciphers.base import ( + AEADCipherContext, + AEADDecryptionContext, + AEADEncryptionContext, + Cipher, + CipherContext, +) + +__all__ = [ + "AEADCipherContext", + "AEADDecryptionContext", + "AEADEncryptionContext", + "BlockCipherAlgorithm", + "Cipher", + "CipherAlgorithm", + "CipherContext", +] diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..fe8e6c76 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc new file mode 100644 index 00000000..de500ba6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc new file mode 100644 index 00000000..41939869 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc new file mode 100644 index 00000000..bc8d2cb5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc new file mode 100644 index 00000000..a4fd237e Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/aead.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/aead.py new file mode 100644 index 00000000..c8a582d7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/aead.py @@ -0,0 +1,23 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl + +__all__ = [ + "AESCCM", + "AESGCM", + "AESGCMSIV", + "AESOCB3", + "AESSIV", + "ChaCha20Poly1305", +] + +AESGCM = rust_openssl.aead.AESGCM +ChaCha20Poly1305 = rust_openssl.aead.ChaCha20Poly1305 +AESCCM = rust_openssl.aead.AESCCM +AESSIV = rust_openssl.aead.AESSIV +AESOCB3 = rust_openssl.aead.AESOCB3 +AESGCMSIV = rust_openssl.aead.AESGCMSIV diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py new file mode 100644 index 00000000..1e402c7e --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py @@ -0,0 +1,136 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography import utils +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + ARC4 as ARC4, +) +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + CAST5 as CAST5, +) +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + IDEA as IDEA, +) +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + SEED as SEED, +) +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + Blowfish as Blowfish, +) +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + TripleDES as TripleDES, +) +from cryptography.hazmat.primitives._cipheralgorithm import _verify_key_size +from cryptography.hazmat.primitives.ciphers import ( + BlockCipherAlgorithm, + CipherAlgorithm, +) + + +class AES(BlockCipherAlgorithm): + name = "AES" + block_size = 128 + # 512 added to support AES-256-XTS, which uses 512-bit keys + key_sizes = frozenset([128, 192, 256, 512]) + + def __init__(self, key: utils.Buffer): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class AES128(BlockCipherAlgorithm): + name = "AES" + block_size = 128 + key_sizes = frozenset([128]) + key_size = 128 + + def __init__(self, key: utils.Buffer): + self.key = _verify_key_size(self, key) + + +class AES256(BlockCipherAlgorithm): + name = "AES" + block_size = 128 + key_sizes = frozenset([256]) + key_size = 256 + + def __init__(self, key: utils.Buffer): + self.key = _verify_key_size(self, key) + + +class Camellia(BlockCipherAlgorithm): + name = "camellia" + block_size = 128 + key_sizes = frozenset([128, 192, 256]) + + def __init__(self, key: utils.Buffer): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +utils.deprecated( + ARC4, + __name__, + "ARC4 has been moved to " + "cryptography.hazmat.decrepit.ciphers.algorithms.ARC4 and " + "will be removed from " + "cryptography.hazmat.primitives.ciphers.algorithms in 48.0.0.", + utils.DeprecatedIn43, + name="ARC4", +) + + +utils.deprecated( + TripleDES, + __name__, + "TripleDES has been moved to " + "cryptography.hazmat.decrepit.ciphers.algorithms.TripleDES and " + "will be removed from " + "cryptography.hazmat.primitives.ciphers.algorithms in 48.0.0.", + utils.DeprecatedIn43, + name="TripleDES", +) + + +class ChaCha20(CipherAlgorithm): + name = "ChaCha20" + key_sizes = frozenset([256]) + + def __init__(self, key: utils.Buffer, nonce: utils.Buffer): + self.key = _verify_key_size(self, key) + 
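# --- usage sketch (illustration only; assumes the standard cryptography
# public API for the AEAD re-exports above, AESGCM shown) ---
import os
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

aead_key = AESGCM.generate_key(bit_length=256)
nonce = os.urandom(12)  # 96-bit nonces are the conventional GCM size
aad = b"authenticated but not encrypted"
ct = AESGCM(aead_key).encrypt(nonce, b"secret", aad)  # ciphertext || 16-byte tag
assert AESGCM(aead_key).decrypt(nonce, ct, aad) == b"secret"
# --- end sketch ---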
utils._check_byteslike("nonce", nonce) + + if len(nonce) != 16: + raise ValueError("nonce must be 128-bits (16 bytes)") + + self._nonce = nonce + + @property + def nonce(self) -> utils.Buffer: + return self._nonce + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class SM4(BlockCipherAlgorithm): + name = "SM4" + block_size = 128 + key_sizes = frozenset([128]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/base.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/base.py new file mode 100644 index 00000000..24fceea2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/base.py @@ -0,0 +1,146 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc +import typing + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives._cipheralgorithm import CipherAlgorithm +from cryptography.hazmat.primitives.ciphers import modes +from cryptography.utils import Buffer + + +class CipherContext(metaclass=abc.ABCMeta): + @abc.abstractmethod + def update(self, data: Buffer) -> bytes: + """ + Processes the provided bytes through the cipher and returns the results + as bytes. + """ + + @abc.abstractmethod + def update_into(self, data: Buffer, buf: Buffer) -> int: + """ + Processes the provided bytes and writes the resulting data into the + provided buffer. Returns the number of bytes written. + """ + + @abc.abstractmethod + def finalize(self) -> bytes: + """ + Returns the results of processing the final block as bytes. + """ + + @abc.abstractmethod + def reset_nonce(self, nonce: bytes) -> None: + """ + Resets the nonce for the cipher context to the provided value. + Raises an exception if it does not support reset or if the + provided nonce does not have a valid length. + """ + + +class AEADCipherContext(CipherContext, metaclass=abc.ABCMeta): + @abc.abstractmethod + def authenticate_additional_data(self, data: Buffer) -> None: + """ + Authenticates the provided bytes. + """ + + +class AEADDecryptionContext(AEADCipherContext, metaclass=abc.ABCMeta): + @abc.abstractmethod + def finalize_with_tag(self, tag: bytes) -> bytes: + """ + Returns the results of processing the final block as bytes and allows + delayed passing of the authentication tag. + """ + + +class AEADEncryptionContext(AEADCipherContext, metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def tag(self) -> bytes: + """ + Returns tag bytes. This is only available after encryption is + finalized. + """ + + +Mode = typing.TypeVar( + "Mode", bound=typing.Optional[modes.Mode], covariant=True +) + + +class Cipher(typing.Generic[Mode]): + def __init__( + self, + algorithm: CipherAlgorithm, + mode: Mode, + backend: typing.Any = None, + ) -> None: + if not isinstance(algorithm, CipherAlgorithm): + raise TypeError("Expected interface of CipherAlgorithm.") + + if mode is not None: + # mypy needs this assert to narrow the type from our generic + # type. Maybe it won't some time in the future. 
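# --- usage sketch (illustration only; assumes the standard cryptography
# public API) ---
# ChaCha20 is a stream cipher, so it is used with mode=None; per the check
# above, the nonce argument is 16 bytes (OpenSSL's counter||nonce layout).
import os
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms

chacha_key = os.urandom(32)  # 256-bit key, the only size in key_sizes
full_nonce = os.urandom(16)
enc = Cipher(algorithms.ChaCha20(chacha_key, full_nonce), mode=None).encryptor()
ct = enc.update(b"secret") + enc.finalize()
# --- end sketch ---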
+ assert isinstance(mode, modes.Mode) + mode.validate_for_algorithm(algorithm) + + self.algorithm = algorithm + self.mode = mode + + @typing.overload + def encryptor( + self: Cipher[modes.ModeWithAuthenticationTag], + ) -> AEADEncryptionContext: ... + + @typing.overload + def encryptor( + self: _CIPHER_TYPE, + ) -> CipherContext: ... + + def encryptor(self): + if isinstance(self.mode, modes.ModeWithAuthenticationTag): + if self.mode.tag is not None: + raise ValueError( + "Authentication tag must be None when encrypting." + ) + + return rust_openssl.ciphers.create_encryption_ctx( + self.algorithm, self.mode + ) + + @typing.overload + def decryptor( + self: Cipher[modes.ModeWithAuthenticationTag], + ) -> AEADDecryptionContext: ... + + @typing.overload + def decryptor( + self: _CIPHER_TYPE, + ) -> CipherContext: ... + + def decryptor(self): + return rust_openssl.ciphers.create_decryption_ctx( + self.algorithm, self.mode + ) + + +_CIPHER_TYPE = Cipher[ + typing.Union[ + modes.ModeWithNonce, + modes.ModeWithTweak, + modes.ECB, + modes.ModeWithInitializationVector, + None, + ] +] + +CipherContext.register(rust_openssl.ciphers.CipherContext) +AEADEncryptionContext.register(rust_openssl.ciphers.AEADEncryptionContext) +AEADDecryptionContext.register(rust_openssl.ciphers.AEADDecryptionContext) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/modes.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/modes.py new file mode 100644 index 00000000..36c555c6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/modes.py @@ -0,0 +1,268 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives._cipheralgorithm import ( + BlockCipherAlgorithm, + CipherAlgorithm, +) +from cryptography.hazmat.primitives.ciphers import algorithms + + +class Mode(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def name(self) -> str: + """ + A string naming this mode (e.g. "ECB", "CBC"). + """ + + @abc.abstractmethod + def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: + """ + Checks that all the necessary invariants of this (mode, algorithm) + combination are met. + """ + + +class ModeWithInitializationVector(Mode, metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def initialization_vector(self) -> utils.Buffer: + """ + The value of the initialization vector for this mode as bytes. + """ + + +class ModeWithTweak(Mode, metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def tweak(self) -> utils.Buffer: + """ + The value of the tweak for this mode as bytes. + """ + + +class ModeWithNonce(Mode, metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def nonce(self) -> utils.Buffer: + """ + The value of the nonce for this mode as bytes. + """ + + +class ModeWithAuthenticationTag(Mode, metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def tag(self) -> bytes | None: + """ + The value of the tag supplied to the constructor of this mode. 
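# --- usage sketch (illustration only; assumes the standard cryptography
# public API) ---
# The CipherContext returned by encryptor()/decryptor() above, driven with
# AES-CBC. There is no padding layer here, so input must be a multiple of the
# 16-byte block size.
import os
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

cbc_key = os.urandom(32)
iv = os.urandom(16)  # CBC's IV must match the 128-bit block size
enc = Cipher(algorithms.AES(cbc_key), modes.CBC(iv)).encryptor()
ct = enc.update(b"sixteen byte msg") + enc.finalize()
dec = Cipher(algorithms.AES(cbc_key), modes.CBC(iv)).decryptor()
assert dec.update(ct) + dec.finalize() == b"sixteen byte msg"
# --- end sketch ---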
+ """ + + +def _check_aes_key_length(self: Mode, algorithm: CipherAlgorithm) -> None: + if algorithm.key_size > 256 and algorithm.name == "AES": + raise ValueError( + "Only 128, 192, and 256 bit keys are allowed for this AES mode" + ) + + +def _check_iv_length( + self: ModeWithInitializationVector, algorithm: BlockCipherAlgorithm +) -> None: + iv_len = len(self.initialization_vector) + if iv_len * 8 != algorithm.block_size: + raise ValueError(f"Invalid IV size ({iv_len}) for {self.name}.") + + +def _check_nonce_length( + nonce: utils.Buffer, name: str, algorithm: CipherAlgorithm +) -> None: + if not isinstance(algorithm, BlockCipherAlgorithm): + raise UnsupportedAlgorithm( + f"{name} requires a block cipher algorithm", + _Reasons.UNSUPPORTED_CIPHER, + ) + if len(nonce) * 8 != algorithm.block_size: + raise ValueError(f"Invalid nonce size ({len(nonce)}) for {name}.") + + +def _check_iv_and_key_length( + self: ModeWithInitializationVector, algorithm: CipherAlgorithm +) -> None: + if not isinstance(algorithm, BlockCipherAlgorithm): + raise UnsupportedAlgorithm( + f"{self} requires a block cipher algorithm", + _Reasons.UNSUPPORTED_CIPHER, + ) + _check_aes_key_length(self, algorithm) + _check_iv_length(self, algorithm) + + +class CBC(ModeWithInitializationVector): + name = "CBC" + + def __init__(self, initialization_vector: utils.Buffer): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + @property + def initialization_vector(self) -> utils.Buffer: + return self._initialization_vector + + validate_for_algorithm = _check_iv_and_key_length + + +class XTS(ModeWithTweak): + name = "XTS" + + def __init__(self, tweak: utils.Buffer): + utils._check_byteslike("tweak", tweak) + + if len(tweak) != 16: + raise ValueError("tweak must be 128-bits (16 bytes)") + + self._tweak = tweak + + @property + def tweak(self) -> utils.Buffer: + return self._tweak + + def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: + if isinstance(algorithm, (algorithms.AES128, algorithms.AES256)): + raise TypeError( + "The AES128 and AES256 classes do not support XTS, please use " + "the standard AES class instead." 
+ ) + + if algorithm.key_size not in (256, 512): + raise ValueError( + "The XTS specification requires a 256-bit key for AES-128-XTS" + " and 512-bit key for AES-256-XTS" + ) + + +class ECB(Mode): + name = "ECB" + + validate_for_algorithm = _check_aes_key_length + + +class OFB(ModeWithInitializationVector): + name = "OFB" + + def __init__(self, initialization_vector: utils.Buffer): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + @property + def initialization_vector(self) -> utils.Buffer: + return self._initialization_vector + + validate_for_algorithm = _check_iv_and_key_length + + +class CFB(ModeWithInitializationVector): + name = "CFB" + + def __init__(self, initialization_vector: utils.Buffer): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + @property + def initialization_vector(self) -> utils.Buffer: + return self._initialization_vector + + validate_for_algorithm = _check_iv_and_key_length + + +class CFB8(ModeWithInitializationVector): + name = "CFB8" + + def __init__(self, initialization_vector: utils.Buffer): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + @property + def initialization_vector(self) -> utils.Buffer: + return self._initialization_vector + + validate_for_algorithm = _check_iv_and_key_length + + +class CTR(ModeWithNonce): + name = "CTR" + + def __init__(self, nonce: utils.Buffer): + utils._check_byteslike("nonce", nonce) + self._nonce = nonce + + @property + def nonce(self) -> utils.Buffer: + return self._nonce + + def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: + _check_aes_key_length(self, algorithm) + _check_nonce_length(self.nonce, self.name, algorithm) + + +class GCM(ModeWithInitializationVector, ModeWithAuthenticationTag): + name = "GCM" + _MAX_ENCRYPTED_BYTES = (2**39 - 256) // 8 + _MAX_AAD_BYTES = (2**64) // 8 + + def __init__( + self, + initialization_vector: utils.Buffer, + tag: bytes | None = None, + min_tag_length: int = 16, + ): + # OpenSSL 3.0.0 constrains GCM IVs to [64, 1024] bits inclusive + # This is a sane limit anyway so we'll enforce it here. + utils._check_byteslike("initialization_vector", initialization_vector) + if len(initialization_vector) < 8 or len(initialization_vector) > 128: + raise ValueError( + "initialization_vector must be between 8 and 128 bytes (64 " + "and 1024 bits)." + ) + self._initialization_vector = initialization_vector + if tag is not None: + utils._check_bytes("tag", tag) + if min_tag_length < 4: + raise ValueError("min_tag_length must be >= 4") + if len(tag) < min_tag_length: + raise ValueError( + f"Authentication tag must be {min_tag_length} bytes or " + "longer." 
+ ) + self._tag = tag + self._min_tag_length = min_tag_length + + @property + def tag(self) -> bytes | None: + return self._tag + + @property + def initialization_vector(self) -> utils.Buffer: + return self._initialization_vector + + def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: + _check_aes_key_length(self, algorithm) + if not isinstance(algorithm, BlockCipherAlgorithm): + raise UnsupportedAlgorithm( + "GCM requires a block cipher algorithm", + _Reasons.UNSUPPORTED_CIPHER, + ) + block_size_bytes = algorithm.block_size // 8 + if self._tag is not None and len(self._tag) > block_size_bytes: + raise ValueError( + f"Authentication tag cannot be more than {block_size_bytes} " + "bytes." + ) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/cmac.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/cmac.py new file mode 100644 index 00000000..2c67ce22 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/cmac.py @@ -0,0 +1,10 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl + +__all__ = ["CMAC"] +CMAC = rust_openssl.cmac.CMAC diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/constant_time.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/constant_time.py new file mode 100644 index 00000000..3975c714 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/constant_time.py @@ -0,0 +1,14 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import hmac + + +def bytes_eq(a: bytes, b: bytes) -> bool: + if not isinstance(a, bytes) or not isinstance(b, bytes): + raise TypeError("a and b must be bytes.") + + return hmac.compare_digest(a, b) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hashes.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hashes.py new file mode 100644 index 00000000..4b55ec33 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hashes.py @@ -0,0 +1,246 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.utils import Buffer + +__all__ = [ + "MD5", + "SHA1", + "SHA3_224", + "SHA3_256", + "SHA3_384", + "SHA3_512", + "SHA224", + "SHA256", + "SHA384", + "SHA512", + "SHA512_224", + "SHA512_256", + "SHAKE128", + "SHAKE256", + "SM3", + "BLAKE2b", + "BLAKE2s", + "ExtendableOutputFunction", + "Hash", + "HashAlgorithm", + "HashContext", + "XOFHash", +] + + +class HashAlgorithm(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def name(self) -> str: + """ + A string naming this algorithm (e.g. "sha256", "md5"). + """ + + @property + @abc.abstractmethod + def digest_size(self) -> int: + """ + The size of the resulting digest in bytes. 
+ """ + + @property + @abc.abstractmethod + def block_size(self) -> int | None: + """ + The internal block size of the hash function, or None if the hash + function does not use blocks internally (e.g. SHA3). + """ + + +class HashContext(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def algorithm(self) -> HashAlgorithm: + """ + A HashAlgorithm that will be used by this context. + """ + + @abc.abstractmethod + def update(self, data: Buffer) -> None: + """ + Processes the provided bytes through the hash. + """ + + @abc.abstractmethod + def finalize(self) -> bytes: + """ + Finalizes the hash context and returns the hash digest as bytes. + """ + + @abc.abstractmethod + def copy(self) -> HashContext: + """ + Return a HashContext that is a copy of the current context. + """ + + +Hash = rust_openssl.hashes.Hash +HashContext.register(Hash) + +XOFHash = rust_openssl.hashes.XOFHash + + +class ExtendableOutputFunction(metaclass=abc.ABCMeta): + """ + An interface for extendable output functions. + """ + + +class SHA1(HashAlgorithm): + name = "sha1" + digest_size = 20 + block_size = 64 + + +class SHA512_224(HashAlgorithm): # noqa: N801 + name = "sha512-224" + digest_size = 28 + block_size = 128 + + +class SHA512_256(HashAlgorithm): # noqa: N801 + name = "sha512-256" + digest_size = 32 + block_size = 128 + + +class SHA224(HashAlgorithm): + name = "sha224" + digest_size = 28 + block_size = 64 + + +class SHA256(HashAlgorithm): + name = "sha256" + digest_size = 32 + block_size = 64 + + +class SHA384(HashAlgorithm): + name = "sha384" + digest_size = 48 + block_size = 128 + + +class SHA512(HashAlgorithm): + name = "sha512" + digest_size = 64 + block_size = 128 + + +class SHA3_224(HashAlgorithm): # noqa: N801 + name = "sha3-224" + digest_size = 28 + block_size = None + + +class SHA3_256(HashAlgorithm): # noqa: N801 + name = "sha3-256" + digest_size = 32 + block_size = None + + +class SHA3_384(HashAlgorithm): # noqa: N801 + name = "sha3-384" + digest_size = 48 + block_size = None + + +class SHA3_512(HashAlgorithm): # noqa: N801 + name = "sha3-512" + digest_size = 64 + block_size = None + + +class SHAKE128(HashAlgorithm, ExtendableOutputFunction): + name = "shake128" + block_size = None + + def __init__(self, digest_size: int): + if not isinstance(digest_size, int): + raise TypeError("digest_size must be an integer") + + if digest_size < 1: + raise ValueError("digest_size must be a positive integer") + + self._digest_size = digest_size + + @property + def digest_size(self) -> int: + return self._digest_size + + +class SHAKE256(HashAlgorithm, ExtendableOutputFunction): + name = "shake256" + block_size = None + + def __init__(self, digest_size: int): + if not isinstance(digest_size, int): + raise TypeError("digest_size must be an integer") + + if digest_size < 1: + raise ValueError("digest_size must be a positive integer") + + self._digest_size = digest_size + + @property + def digest_size(self) -> int: + return self._digest_size + + +class MD5(HashAlgorithm): + name = "md5" + digest_size = 16 + block_size = 64 + + +class BLAKE2b(HashAlgorithm): + name = "blake2b" + _max_digest_size = 64 + _min_digest_size = 1 + block_size = 128 + + def __init__(self, digest_size: int): + if digest_size != 64: + raise ValueError("Digest size must be 64") + + self._digest_size = digest_size + + @property + def digest_size(self) -> int: + return self._digest_size + + +class BLAKE2s(HashAlgorithm): + name = "blake2s" + block_size = 64 + _max_digest_size = 32 + _min_digest_size = 1 + + def __init__(self, digest_size: 
int): + if digest_size != 32: + raise ValueError("Digest size must be 32") + + self._digest_size = digest_size + + @property + def digest_size(self) -> int: + return self._digest_size + + +class SM3(HashAlgorithm): + name = "sm3" + digest_size = 32 + block_size = 64 diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hmac.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hmac.py new file mode 100644 index 00000000..a9442d59 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hmac.py @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import hashes + +__all__ = ["HMAC"] + +HMAC = rust_openssl.hmac.HMAC +hashes.HashContext.register(HMAC) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__init__.py new file mode 100644 index 00000000..79bb459f --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__init__.py @@ -0,0 +1,23 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + + +class KeyDerivationFunction(metaclass=abc.ABCMeta): + @abc.abstractmethod + def derive(self, key_material: bytes) -> bytes: + """ + Deterministically generates and returns a new key based on the existing + key material. + """ + + @abc.abstractmethod + def verify(self, key_material: bytes, expected_key: bytes) -> None: + """ + Checks whether the key generated by the key material matches the + expected derived key. Raises an exception if they do not match. 
+ """ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..eb0adbb8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/argon2.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/argon2.cpython-312.pyc new file mode 100644 index 00000000..6fd62c44 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/argon2.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc new file mode 100644 index 00000000..dce9bfee Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc new file mode 100644 index 00000000..033f03aa Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc new file mode 100644 index 00000000..76d76f48 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc new file mode 100644 index 00000000..e8d69cef Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc new file mode 100644 index 00000000..12224f29 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc new file mode 100644 index 00000000..9dff1c5f Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/argon2.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/argon2.py new file mode 100644 index 00000000..405fc8df --- /dev/null +++ 
b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/argon2.py @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + +Argon2id = rust_openssl.kdf.Argon2id +KeyDerivationFunction.register(Argon2id) + +__all__ = ["Argon2id"] diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py new file mode 100644 index 00000000..1b928415 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py @@ -0,0 +1,125 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import typing +from collections.abc import Callable + +from cryptography import utils +from cryptography.exceptions import AlreadyFinalized, InvalidKey +from cryptography.hazmat.primitives import constant_time, hashes, hmac +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +def _int_to_u32be(n: int) -> bytes: + return n.to_bytes(length=4, byteorder="big") + + +def _common_args_checks( + algorithm: hashes.HashAlgorithm, + length: int, + otherinfo: bytes | None, +) -> None: + max_length = algorithm.digest_size * (2**32 - 1) + if length > max_length: + raise ValueError(f"Cannot derive keys larger than {max_length} bits.") + if otherinfo is not None: + utils._check_bytes("otherinfo", otherinfo) + + +def _concatkdf_derive( + key_material: utils.Buffer, + length: int, + auxfn: Callable[[], hashes.HashContext], + otherinfo: bytes, +) -> bytes: + utils._check_byteslike("key_material", key_material) + output = [b""] + outlen = 0 + counter = 1 + + while length > outlen: + h = auxfn() + h.update(_int_to_u32be(counter)) + h.update(key_material) + h.update(otherinfo) + output.append(h.finalize()) + outlen += len(output[-1]) + counter += 1 + + return b"".join(output)[:length] + + +class ConcatKDFHash(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + length: int, + otherinfo: bytes | None, + backend: typing.Any = None, + ): + _common_args_checks(algorithm, length, otherinfo) + self._algorithm = algorithm + self._length = length + self._otherinfo: bytes = otherinfo if otherinfo is not None else b"" + + self._used = False + + def _hash(self) -> hashes.Hash: + return hashes.Hash(self._algorithm) + + def derive(self, key_material: utils.Buffer) -> bytes: + if self._used: + raise AlreadyFinalized + self._used = True + return _concatkdf_derive( + key_material, self._length, self._hash, self._otherinfo + ) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey + + +class ConcatKDFHMAC(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + length: int, + salt: bytes | None, + otherinfo: bytes | None, + backend: typing.Any = None, + ): + _common_args_checks(algorithm, length, otherinfo) + self._algorithm = algorithm + self._length = length + self._otherinfo: bytes = otherinfo if otherinfo is not None 
else b"" + + if algorithm.block_size is None: + raise TypeError(f"{algorithm.name} is unsupported for ConcatKDF") + + if salt is None: + salt = b"\x00" * algorithm.block_size + else: + utils._check_bytes("salt", salt) + + self._salt = salt + + self._used = False + + def _hmac(self) -> hmac.HMAC: + return hmac.HMAC(self._salt, self._algorithm) + + def derive(self, key_material: utils.Buffer) -> bytes: + if self._used: + raise AlreadyFinalized + self._used = True + return _concatkdf_derive( + key_material, self._length, self._hmac, self._otherinfo + ) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py new file mode 100644 index 00000000..1e162d9d --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py @@ -0,0 +1,16 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + +HKDF = rust_openssl.kdf.HKDF +HKDFExpand = rust_openssl.kdf.HKDFExpand + +KeyDerivationFunction.register(HKDF) +KeyDerivationFunction.register(HKDFExpand) + +__all__ = ["HKDF", "HKDFExpand"] diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py new file mode 100644 index 00000000..5b471376 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py @@ -0,0 +1,303 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +import typing +from collections.abc import Callable + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, + InvalidKey, + UnsupportedAlgorithm, + _Reasons, +) +from cryptography.hazmat.primitives import ( + ciphers, + cmac, + constant_time, + hashes, + hmac, +) +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +class Mode(utils.Enum): + CounterMode = "ctr" + + +class CounterLocation(utils.Enum): + BeforeFixed = "before_fixed" + AfterFixed = "after_fixed" + MiddleFixed = "middle_fixed" + + +class _KBKDFDeriver: + def __init__( + self, + prf: Callable, + mode: Mode, + length: int, + rlen: int, + llen: int | None, + location: CounterLocation, + break_location: int | None, + label: bytes | None, + context: bytes | None, + fixed: bytes | None, + ): + assert callable(prf) + + if not isinstance(mode, Mode): + raise TypeError("mode must be of type Mode") + + if not isinstance(location, CounterLocation): + raise TypeError("location must be of type CounterLocation") + + if break_location is None and location is CounterLocation.MiddleFixed: + raise ValueError("Please specify a break_location") + + if ( + break_location is not None + and location != CounterLocation.MiddleFixed + ): + raise ValueError( + "break_location is ignored when location is not" + " CounterLocation.MiddleFixed" + ) + + if break_location is not None and not isinstance(break_location, int): + raise TypeError("break_location must be an integer") + + if break_location is not None and break_location < 0: + raise ValueError("break_location must be a positive integer") + + if (label or context) and fixed: + raise ValueError( + "When supplying fixed data, label and context are ignored." + ) + + if rlen is None or not self._valid_byte_length(rlen): + raise ValueError("rlen must be between 1 and 4") + + if llen is None and fixed is None: + raise ValueError("Please specify an llen") + + if llen is not None and not isinstance(llen, int): + raise TypeError("llen must be an integer") + + if llen == 0: + raise ValueError("llen must be non-zero") + + if label is None: + label = b"" + + if context is None: + context = b"" + + utils._check_bytes("label", label) + utils._check_bytes("context", context) + self._prf = prf + self._mode = mode + self._length = length + self._rlen = rlen + self._llen = llen + self._location = location + self._break_location = break_location + self._label = label + self._context = context + self._used = False + self._fixed_data = fixed + + @staticmethod + def _valid_byte_length(value: int) -> bool: + if not isinstance(value, int): + raise TypeError("value must be of type int") + + value_bin = utils.int_to_bytes(1, value) + return 1 <= len(value_bin) <= 4 + + def derive( + self, key_material: utils.Buffer, prf_output_size: int + ) -> bytes: + if self._used: + raise AlreadyFinalized + + utils._check_byteslike("key_material", key_material) + self._used = True + + # inverse floor division (equivalent to ceiling) + rounds = -(-self._length // prf_output_size) + + output = [b""] + + # For counter mode, the number of iterations shall not be + # larger than 2^r-1, where r <= 32 is the binary length of the counter + # This ensures that the counter values used as an input to the + # PRF will not repeat during a particular call to the KDF function. 
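+ # For example, with rlen=4 the counter occupies 32 bits, so at most
+ # 2**32 - 1 rounds are permitted; deriving a 32-byte key with an
+ # HMAC-SHA256 PRF (32-byte output) needs only ceil(32 / 32) = 1
+ # round, far below that bound.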
+ r_bin = utils.int_to_bytes(1, self._rlen) + if rounds > pow(2, len(r_bin) * 8) - 1: + raise ValueError("There are too many iterations.") + + fixed = self._generate_fixed_input() + + if self._location == CounterLocation.BeforeFixed: + data_before_ctr = b"" + data_after_ctr = fixed + elif self._location == CounterLocation.AfterFixed: + data_before_ctr = fixed + data_after_ctr = b"" + else: + if isinstance( + self._break_location, int + ) and self._break_location > len(fixed): + raise ValueError("break_location offset > len(fixed)") + data_before_ctr = fixed[: self._break_location] + data_after_ctr = fixed[self._break_location :] + + for i in range(1, rounds + 1): + h = self._prf(key_material) + + counter = utils.int_to_bytes(i, self._rlen) + input_data = data_before_ctr + counter + data_after_ctr + + h.update(input_data) + + output.append(h.finalize()) + + return b"".join(output)[: self._length] + + def _generate_fixed_input(self) -> bytes: + if self._fixed_data and isinstance(self._fixed_data, bytes): + return self._fixed_data + + l_val = utils.int_to_bytes(self._length * 8, self._llen) + + return b"".join([self._label, b"\x00", self._context, l_val]) + + +class KBKDFHMAC(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + mode: Mode, + length: int, + rlen: int, + llen: int | None, + location: CounterLocation, + label: bytes | None, + context: bytes | None, + fixed: bytes | None, + backend: typing.Any = None, + *, + break_location: int | None = None, + ): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise UnsupportedAlgorithm( + "Algorithm supplied is not a supported hash algorithm.", + _Reasons.UNSUPPORTED_HASH, + ) + + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + if not ossl.hmac_supported(algorithm): + raise UnsupportedAlgorithm( + "Algorithm supplied is not a supported hmac algorithm.", + _Reasons.UNSUPPORTED_HASH, + ) + + self._algorithm = algorithm + + self._deriver = _KBKDFDeriver( + self._prf, + mode, + length, + rlen, + llen, + location, + break_location, + label, + context, + fixed, + ) + + def _prf(self, key_material: bytes) -> hmac.HMAC: + return hmac.HMAC(key_material, self._algorithm) + + def derive(self, key_material: utils.Buffer) -> bytes: + return self._deriver.derive(key_material, self._algorithm.digest_size) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey + + +class KBKDFCMAC(KeyDerivationFunction): + def __init__( + self, + algorithm, + mode: Mode, + length: int, + rlen: int, + llen: int | None, + location: CounterLocation, + label: bytes | None, + context: bytes | None, + fixed: bytes | None, + backend: typing.Any = None, + *, + break_location: int | None = None, + ): + if not issubclass( + algorithm, ciphers.BlockCipherAlgorithm + ) or not issubclass(algorithm, ciphers.CipherAlgorithm): + raise UnsupportedAlgorithm( + "Algorithm supplied is not a supported cipher algorithm.", + _Reasons.UNSUPPORTED_CIPHER, + ) + + self._algorithm = algorithm + self._cipher: ciphers.BlockCipherAlgorithm | None = None + + self._deriver = _KBKDFDeriver( + self._prf, + mode, + length, + rlen, + llen, + location, + break_location, + label, + context, + fixed, + ) + + def _prf(self, _: bytes) -> cmac.CMAC: + assert self._cipher is not None + + return cmac.CMAC(self._cipher) + + def derive(self, key_material: utils.Buffer) -> bytes: + self._cipher = self._algorithm(key_material) + + assert 
self._cipher is not None + + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + if not ossl.cmac_algorithm_supported(self._cipher): + raise UnsupportedAlgorithm( + "Algorithm supplied is not a supported cipher algorithm.", + _Reasons.UNSUPPORTED_CIPHER, + ) + + return self._deriver.derive(key_material, self._cipher.block_size // 8) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py new file mode 100644 index 00000000..d539f131 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py @@ -0,0 +1,62 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import typing + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, + InvalidKey, + UnsupportedAlgorithm, + _Reasons, +) +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import constant_time, hashes +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +class PBKDF2HMAC(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + length: int, + salt: bytes, + iterations: int, + backend: typing.Any = None, + ): + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + if not ossl.pbkdf2_hmac_supported(algorithm): + raise UnsupportedAlgorithm( + f"{algorithm.name} is not supported for PBKDF2.", + _Reasons.UNSUPPORTED_HASH, + ) + self._used = False + self._algorithm = algorithm + self._length = length + utils._check_bytes("salt", salt) + self._salt = salt + self._iterations = iterations + + def derive(self, key_material: utils.Buffer) -> bytes: + if self._used: + raise AlreadyFinalized("PBKDF2 instances can only be used once.") + self._used = True + + return rust_openssl.kdf.derive_pbkdf2_hmac( + key_material, + self._algorithm, + self._salt, + self._iterations, + self._length, + ) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + derived_key = self.derive(key_material) + if not constant_time.bytes_eq(derived_key, expected_key): + raise InvalidKey("Keys do not match.") diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py new file mode 100644 index 00000000..f791ceea --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py @@ -0,0 +1,19 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
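A quick note on the PBKDF2HMAC wrapper above: a minimal usage sketch with an illustrative salt and iteration count (pick an iteration count appropriate to your threat model; none of these values come from this repository):

    import os

    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

    salt = os.urandom(16)  # must be stored alongside the derived key
    kdf = PBKDF2HMAC(
        algorithm=hashes.SHA256(),
        length=32,
        salt=salt,
        iterations=600_000,
    )
    key = kdf.derive(b"correct horse battery staple")

As derive() makes explicit, each instance can only be used once; reuse raises AlreadyFinalized.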
+ +from __future__ import annotations + +import sys + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + +# This is used by the scrypt tests to skip tests that require more memory +# than the MEM_LIMIT +_MEM_LIMIT = sys.maxsize // 2 + +Scrypt = rust_openssl.kdf.Scrypt +KeyDerivationFunction.register(Scrypt) + +__all__ = ["Scrypt"] diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py new file mode 100644 index 00000000..63870cdc --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py @@ -0,0 +1,61 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import typing + +from cryptography import utils +from cryptography.exceptions import AlreadyFinalized, InvalidKey +from cryptography.hazmat.primitives import constant_time, hashes +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +def _int_to_u32be(n: int) -> bytes: + return n.to_bytes(length=4, byteorder="big") + + +class X963KDF(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + length: int, + sharedinfo: bytes | None, + backend: typing.Any = None, + ): + max_len = algorithm.digest_size * (2**32 - 1) + if length > max_len: + raise ValueError(f"Cannot derive keys larger than {max_len} bits.") + if sharedinfo is not None: + utils._check_bytes("sharedinfo", sharedinfo) + + self._algorithm = algorithm + self._length = length + self._sharedinfo = sharedinfo + self._used = False + + def derive(self, key_material: utils.Buffer) -> bytes: + if self._used: + raise AlreadyFinalized + self._used = True + utils._check_byteslike("key_material", key_material) + output = [b""] + outlen = 0 + counter = 1 + + while self._length > outlen: + h = hashes.Hash(self._algorithm) + h.update(key_material) + h.update(_int_to_u32be(counter)) + if self._sharedinfo is not None: + h.update(self._sharedinfo) + output.append(h.finalize()) + outlen += len(output[-1]) + counter += 1 + + return b"".join(output)[: self._length] + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/keywrap.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/keywrap.py new file mode 100644 index 00000000..b93d87d3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/keywrap.py @@ -0,0 +1,177 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details.
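A quick note on the X963KDF class above: a minimal usage sketch, assuming the shared secret comes from a key agreement step performed elsewhere and that the sharedinfo value is illustrative:

    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.kdf.x963kdf import X963KDF

    xkdf = X963KDF(
        algorithm=hashes.SHA256(),
        length=32,
        sharedinfo=b"ANSI X9.63 example",
    )
    key = xkdf.derive(b"shared secret from key agreement")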
+ +from __future__ import annotations + +import typing + +from cryptography.hazmat.primitives.ciphers import Cipher +from cryptography.hazmat.primitives.ciphers.algorithms import AES +from cryptography.hazmat.primitives.ciphers.modes import ECB +from cryptography.hazmat.primitives.constant_time import bytes_eq + + +def _wrap_core( + wrapping_key: bytes, + a: bytes, + r: list[bytes], +) -> bytes: + # RFC 3394 Key Wrap - 2.2.1 (index method) + encryptor = Cipher(AES(wrapping_key), ECB()).encryptor() + n = len(r) + for j in range(6): + for i in range(n): + # every encryption operation is a discrete 16 byte chunk (because + # AES has a 128-bit block size) and since we're using ECB it is + # safe to reuse the encryptor for the entire operation + b = encryptor.update(a + r[i]) + a = ( + int.from_bytes(b[:8], byteorder="big") ^ ((n * j) + i + 1) + ).to_bytes(length=8, byteorder="big") + r[i] = b[-8:] + + assert encryptor.finalize() == b"" + + return a + b"".join(r) + + +def aes_key_wrap( + wrapping_key: bytes, + key_to_wrap: bytes, + backend: typing.Any = None, +) -> bytes: + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + if len(key_to_wrap) < 16: + raise ValueError("The key to wrap must be at least 16 bytes") + + if len(key_to_wrap) % 8 != 0: + raise ValueError("The key to wrap must be a multiple of 8 bytes") + + a = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" + r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)] + return _wrap_core(wrapping_key, a, r) + + +def _unwrap_core( + wrapping_key: bytes, + a: bytes, + r: list[bytes], +) -> tuple[bytes, list[bytes]]: + # Implement RFC 3394 Key Unwrap - 2.2.2 (index method) + decryptor = Cipher(AES(wrapping_key), ECB()).decryptor() + n = len(r) + for j in reversed(range(6)): + for i in reversed(range(n)): + atr = ( + int.from_bytes(a, byteorder="big") ^ ((n * j) + i + 1) + ).to_bytes(length=8, byteorder="big") + r[i] + # every decryption operation is a discrete 16 byte chunk so + # it is safe to reuse the decryptor for the entire operation + b = decryptor.update(atr) + a = b[:8] + r[i] = b[-8:] + + assert decryptor.finalize() == b"" + return a, r + + +def aes_key_wrap_with_padding( + wrapping_key: bytes, + key_to_wrap: bytes, + backend: typing.Any = None, +) -> bytes: + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + aiv = b"\xa6\x59\x59\xa6" + len(key_to_wrap).to_bytes( + length=4, byteorder="big" + ) + # pad the key to wrap if necessary + pad = (8 - (len(key_to_wrap) % 8)) % 8 + key_to_wrap = key_to_wrap + b"\x00" * pad + if len(key_to_wrap) == 8: + # RFC 5649 - 4.1 - exactly 8 octets after padding + encryptor = Cipher(AES(wrapping_key), ECB()).encryptor() + b = encryptor.update(aiv + key_to_wrap) + assert encryptor.finalize() == b"" + return b + else: + r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)] + return _wrap_core(wrapping_key, aiv, r) + + +def aes_key_unwrap_with_padding( + wrapping_key: bytes, + wrapped_key: bytes, + backend: typing.Any = None, +) -> bytes: + if len(wrapped_key) < 16: + raise InvalidUnwrap("Must be at least 16 bytes") + + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + if len(wrapped_key) == 16: + # RFC 5649 - 4.2 - exactly two 64-bit blocks + decryptor = Cipher(AES(wrapping_key), ECB()).decryptor() + out = decryptor.update(wrapped_key) + assert decryptor.finalize() == b"" + a = out[:8] + data = 
out[8:] + n = 1 + else: + r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)] + encrypted_aiv = r.pop(0) + n = len(r) + a, r = _unwrap_core(wrapping_key, encrypted_aiv, r) + data = b"".join(r) + + # 1) Check that MSB(32,A) = A65959A6. + # 2) Check that 8*(n-1) < LSB(32,A) <= 8*n. If so, let + # MLI = LSB(32,A). + # 3) Let b = (8*n)-MLI, and then check that the rightmost b octets of + # the output data are zero. + mli = int.from_bytes(a[4:], byteorder="big") + b = (8 * n) - mli + if ( + not bytes_eq(a[:4], b"\xa6\x59\x59\xa6") + or not 8 * (n - 1) < mli <= 8 * n + or (b != 0 and not bytes_eq(data[-b:], b"\x00" * b)) + ): + raise InvalidUnwrap() + + if b == 0: + return data + else: + return data[:-b] + + +def aes_key_unwrap( + wrapping_key: bytes, + wrapped_key: bytes, + backend: typing.Any = None, +) -> bytes: + if len(wrapped_key) < 24: + raise InvalidUnwrap("Must be at least 24 bytes") + + if len(wrapped_key) % 8 != 0: + raise InvalidUnwrap("The wrapped key must be a multiple of 8 bytes") + + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + aiv = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" + r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)] + a = r.pop(0) + a, r = _unwrap_core(wrapping_key, a, r) + if not bytes_eq(a, aiv): + raise InvalidUnwrap() + + return b"".join(r) + + +class InvalidUnwrap(Exception): + pass diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/padding.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/padding.py new file mode 100644 index 00000000..f9cd1f13 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/padding.py @@ -0,0 +1,69 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography import utils +from cryptography.hazmat.bindings._rust import ( + ANSIX923PaddingContext, + ANSIX923UnpaddingContext, + PKCS7PaddingContext, + PKCS7UnpaddingContext, +) + + +class PaddingContext(metaclass=abc.ABCMeta): + @abc.abstractmethod + def update(self, data: utils.Buffer) -> bytes: + """ + Pads the provided bytes and returns any available data as bytes. + """ + + @abc.abstractmethod + def finalize(self) -> bytes: + """ + Finalize the padding, returns bytes. 
+ """ + + +def _byte_padding_check(block_size: int) -> None: + if not (0 <= block_size <= 2040): + raise ValueError("block_size must be in range(0, 2041).") + + if block_size % 8 != 0: + raise ValueError("block_size must be a multiple of 8.") + + +class PKCS7: + def __init__(self, block_size: int): + _byte_padding_check(block_size) + self.block_size = block_size + + def padder(self) -> PaddingContext: + return PKCS7PaddingContext(self.block_size) + + def unpadder(self) -> PaddingContext: + return PKCS7UnpaddingContext(self.block_size) + + +PaddingContext.register(PKCS7PaddingContext) +PaddingContext.register(PKCS7UnpaddingContext) + + +class ANSIX923: + def __init__(self, block_size: int): + _byte_padding_check(block_size) + self.block_size = block_size + + def padder(self) -> PaddingContext: + return ANSIX923PaddingContext(self.block_size) + + def unpadder(self) -> PaddingContext: + return ANSIX923UnpaddingContext(self.block_size) + + +PaddingContext.register(ANSIX923PaddingContext) +PaddingContext.register(ANSIX923UnpaddingContext) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/poly1305.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/poly1305.py new file mode 100644 index 00000000..7f5a77a5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/poly1305.py @@ -0,0 +1,11 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl + +__all__ = ["Poly1305"] + +Poly1305 = rust_openssl.poly1305.Poly1305 diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__init__.py new file mode 100644 index 00000000..62283cc7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__init__.py @@ -0,0 +1,65 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +from cryptography.hazmat.primitives._serialization import ( + BestAvailableEncryption, + Encoding, + KeySerializationEncryption, + NoEncryption, + ParameterFormat, + PrivateFormat, + PublicFormat, + _KeySerializationEncryption, +) +from cryptography.hazmat.primitives.serialization.base import ( + load_der_parameters, + load_der_private_key, + load_der_public_key, + load_pem_parameters, + load_pem_private_key, + load_pem_public_key, +) +from cryptography.hazmat.primitives.serialization.ssh import ( + SSHCertificate, + SSHCertificateBuilder, + SSHCertificateType, + SSHCertPrivateKeyTypes, + SSHCertPublicKeyTypes, + SSHPrivateKeyTypes, + SSHPublicKeyTypes, + load_ssh_private_key, + load_ssh_public_identity, + load_ssh_public_key, + ssh_key_fingerprint, +) + +__all__ = [ + "BestAvailableEncryption", + "Encoding", + "KeySerializationEncryption", + "NoEncryption", + "ParameterFormat", + "PrivateFormat", + "PublicFormat", + "SSHCertPrivateKeyTypes", + "SSHCertPublicKeyTypes", + "SSHCertificate", + "SSHCertificateBuilder", + "SSHCertificateType", + "SSHPrivateKeyTypes", + "SSHPublicKeyTypes", + "_KeySerializationEncryption", + "load_der_parameters", + "load_der_private_key", + "load_der_public_key", + "load_pem_parameters", + "load_pem_private_key", + "load_pem_public_key", + "load_ssh_private_key", + "load_ssh_public_identity", + "load_ssh_public_key", + "ssh_key_fingerprint", +] diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..df90a7ff Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc new file mode 100644 index 00000000..5be75ddc Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc new file mode 100644 index 00000000..78d86598 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc new file mode 100644 index 00000000..bced50a7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc new file mode 100644 index 00000000..f368f005 Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/base.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/base.py new file mode 100644 index 00000000..e7c998b7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/base.py @@ -0,0 +1,14 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl + +load_pem_private_key = rust_openssl.keys.load_pem_private_key +load_der_private_key = rust_openssl.keys.load_der_private_key + +load_pem_public_key = rust_openssl.keys.load_pem_public_key +load_der_public_key = rust_openssl.keys.load_der_public_key + +load_pem_parameters = rust_openssl.dh.from_pem_parameters +load_der_parameters = rust_openssl.dh.from_der_parameters diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py new file mode 100644 index 00000000..58884ff6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py @@ -0,0 +1,176 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import typing +from collections.abc import Iterable + +from cryptography import x509 +from cryptography.hazmat.bindings._rust import pkcs12 as rust_pkcs12 +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives._serialization import PBES as PBES +from cryptography.hazmat.primitives.asymmetric import ( + dsa, + ec, + ed448, + ed25519, + rsa, +) +from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes + +__all__ = [ + "PBES", + "PKCS12Certificate", + "PKCS12KeyAndCertificates", + "PKCS12PrivateKeyTypes", + "load_key_and_certificates", + "load_pkcs12", + "serialize_java_truststore", + "serialize_key_and_certificates", +] + +PKCS12PrivateKeyTypes = typing.Union[ + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, +] + + +PKCS12Certificate = rust_pkcs12.PKCS12Certificate + + +class PKCS12KeyAndCertificates: + def __init__( + self, + key: PrivateKeyTypes | None, + cert: PKCS12Certificate | None, + additional_certs: list[PKCS12Certificate], + ): + if key is not None and not isinstance( + key, + ( + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, + ), + ): + raise TypeError( + "Key must be RSA, DSA, EllipticCurve, ED25519, or ED448" + " private key, or None." 
+ ) + if cert is not None and not isinstance(cert, PKCS12Certificate): + raise TypeError("cert must be a PKCS12Certificate object or None") + if not all( + isinstance(add_cert, PKCS12Certificate) + for add_cert in additional_certs + ): + raise TypeError( + "all values in additional_certs must be PKCS12Certificate" + " objects" + ) + self._key = key + self._cert = cert + self._additional_certs = additional_certs + + @property + def key(self) -> PrivateKeyTypes | None: + return self._key + + @property + def cert(self) -> PKCS12Certificate | None: + return self._cert + + @property + def additional_certs(self) -> list[PKCS12Certificate]: + return self._additional_certs + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PKCS12KeyAndCertificates): + return NotImplemented + + return ( + self.key == other.key + and self.cert == other.cert + and self.additional_certs == other.additional_certs + ) + + def __hash__(self) -> int: + return hash((self.key, self.cert, tuple(self.additional_certs))) + + def __repr__(self) -> str: + fmt = ( + "<PKCS12KeyAndCertificates(key={}, cert={}, additional_certs={})>" + ) + return fmt.format(self.key, self.cert, self.additional_certs) + + +load_key_and_certificates = rust_pkcs12.load_key_and_certificates +load_pkcs12 = rust_pkcs12.load_pkcs12 + + +_PKCS12CATypes = typing.Union[ + x509.Certificate, + PKCS12Certificate, +] + + +def serialize_java_truststore( + certs: Iterable[PKCS12Certificate], + encryption_algorithm: serialization.KeySerializationEncryption, +) -> bytes: + if not certs: + raise ValueError("You must supply at least one cert") + + if not isinstance( + encryption_algorithm, serialization.KeySerializationEncryption + ): + raise TypeError( + "Key encryption algorithm must be a " + "KeySerializationEncryption instance" + ) + + return rust_pkcs12.serialize_java_truststore(certs, encryption_algorithm) + + +def serialize_key_and_certificates( + name: bytes | None, + key: PKCS12PrivateKeyTypes | None, + cert: x509.Certificate | None, + cas: Iterable[_PKCS12CATypes] | None, + encryption_algorithm: serialization.KeySerializationEncryption, +) -> bytes: + if key is not None and not isinstance( + key, + ( + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, + ), + ): + raise TypeError( + "Key must be RSA, DSA, EllipticCurve, ED25519, or ED448" + " private key, or None." + ) + + if not isinstance( + encryption_algorithm, serialization.KeySerializationEncryption + ): + raise TypeError( + "Key encryption algorithm must be a " + "KeySerializationEncryption instance" + ) + + if key is None and cert is None and not cas: + raise ValueError("You must supply at least one of key, cert, or cas") + + return rust_pkcs12.serialize_key_and_certificates( + name, key, cert, cas, encryption_algorithm + ) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py new file mode 100644 index 00000000..456dc5b0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py @@ -0,0 +1,411 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details.
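A quick note on serialize_key_and_certificates above: a minimal sketch in which key and cert are a private key and an x509.Certificate assumed to be loaded elsewhere; the name and passphrase are illustrative:

    from cryptography.hazmat.primitives.serialization import (
        BestAvailableEncryption,
        pkcs12,
    )

    p12_bytes = pkcs12.serialize_key_and_certificates(
        name=b"example",
        key=key,
        cert=cert,
        cas=None,
        encryption_algorithm=BestAvailableEncryption(b"passphrase"),
    )

The output can be read back with load_key_and_certificates (or load_pkcs12) using the same passphrase.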
+ +from __future__ import annotations + +import email.base64mime +import email.generator +import email.message +import email.policy +import io +import typing +from collections.abc import Iterable + +from cryptography import utils, x509 +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.bindings._rust import pkcs7 as rust_pkcs7 +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ec, padding, rsa +from cryptography.hazmat.primitives.ciphers import ( + algorithms, +) +from cryptography.utils import _check_byteslike + +load_pem_pkcs7_certificates = rust_pkcs7.load_pem_pkcs7_certificates + +load_der_pkcs7_certificates = rust_pkcs7.load_der_pkcs7_certificates + +serialize_certificates = rust_pkcs7.serialize_certificates + +PKCS7HashTypes = typing.Union[ + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, +] + +PKCS7PrivateKeyTypes = typing.Union[ + rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey +] + +ContentEncryptionAlgorithm = typing.Union[ + typing.Type[algorithms.AES128], typing.Type[algorithms.AES256] +] + + +class PKCS7Options(utils.Enum): + Text = "Add text/plain MIME type" + Binary = "Don't translate input data into canonical MIME format" + DetachedSignature = "Don't embed data in the PKCS7 structure" + NoCapabilities = "Don't embed SMIME capabilities" + NoAttributes = "Don't embed authenticatedAttributes" + NoCerts = "Don't embed signer certificate" + + +class PKCS7SignatureBuilder: + def __init__( + self, + data: utils.Buffer | None = None, + signers: list[ + tuple[ + x509.Certificate, + PKCS7PrivateKeyTypes, + PKCS7HashTypes, + padding.PSS | padding.PKCS1v15 | None, + ] + ] = [], + additional_certs: list[x509.Certificate] = [], + ): + self._data = data + self._signers = signers + self._additional_certs = additional_certs + + def set_data(self, data: utils.Buffer) -> PKCS7SignatureBuilder: + _check_byteslike("data", data) + if self._data is not None: + raise ValueError("data may only be set once") + + return PKCS7SignatureBuilder(data, self._signers) + + def add_signer( + self, + certificate: x509.Certificate, + private_key: PKCS7PrivateKeyTypes, + hash_algorithm: PKCS7HashTypes, + *, + rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, + ) -> PKCS7SignatureBuilder: + if not isinstance( + hash_algorithm, + ( + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, + ), + ): + raise TypeError( + "hash_algorithm must be one of hashes.SHA224, " + "SHA256, SHA384, or SHA512" + ) + if not isinstance(certificate, x509.Certificate): + raise TypeError("certificate must be a x509.Certificate") + + if not isinstance( + private_key, (rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey) + ): + raise TypeError("Only RSA & EC keys are supported at this time.") + + if rsa_padding is not None: + if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): + raise TypeError("Padding must be PSS or PKCS1v15") + if not isinstance(private_key, rsa.RSAPrivateKey): + raise TypeError("Padding is only supported for RSA keys") + + return PKCS7SignatureBuilder( + self._data, + [ + *self._signers, + (certificate, private_key, hash_algorithm, rsa_padding), + ], + ) + + def add_certificate( + self, certificate: x509.Certificate + ) -> PKCS7SignatureBuilder: + if not isinstance(certificate, x509.Certificate): + raise TypeError("certificate must be a x509.Certificate") + + return PKCS7SignatureBuilder( + self._data, self._signers, [*self._additional_certs, certificate] + ) + 
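+ # A minimal usage sketch (cert and key are hypothetical objects assumed
+ # to be loaded elsewhere):
+ #
+ # sig = (
+ # PKCS7SignatureBuilder()
+ # .set_data(b"hello")
+ # .add_signer(cert, key, hashes.SHA256())
+ # .sign(serialization.Encoding.SMIME, [])
+ # )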
+ def sign( + self, + encoding: serialization.Encoding, + options: Iterable[PKCS7Options], + backend: typing.Any = None, + ) -> bytes: + if len(self._signers) == 0: + raise ValueError("Must have at least one signer") + if self._data is None: + raise ValueError("You must add data to sign") + options = list(options) + if not all(isinstance(x, PKCS7Options) for x in options): + raise ValueError("options must be from the PKCS7Options enum") + if encoding not in ( + serialization.Encoding.PEM, + serialization.Encoding.DER, + serialization.Encoding.SMIME, + ): + raise ValueError( + "Must be PEM, DER, or SMIME from the Encoding enum" + ) + + # Text is a meaningless option unless it is accompanied by + # DetachedSignature + if ( + PKCS7Options.Text in options + and PKCS7Options.DetachedSignature not in options + ): + raise ValueError( + "When passing the Text option you must also pass " + "DetachedSignature" + ) + + if PKCS7Options.Text in options and encoding in ( + serialization.Encoding.DER, + serialization.Encoding.PEM, + ): + raise ValueError( + "The Text option is only available for SMIME serialization" + ) + + # No attributes implies no capabilities so we'll error if you try to + # pass both. + if ( + PKCS7Options.NoAttributes in options + and PKCS7Options.NoCapabilities in options + ): + raise ValueError( + "NoAttributes is a superset of NoCapabilities. Do not pass " + "both values." + ) + + return rust_pkcs7.sign_and_serialize(self, encoding, options) + + +class PKCS7EnvelopeBuilder: + def __init__( + self, + *, + _data: bytes | None = None, + _recipients: list[x509.Certificate] | None = None, + _content_encryption_algorithm: ContentEncryptionAlgorithm + | None = None, + ): + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + if not ossl.rsa_encryption_supported(padding=padding.PKCS1v15()): + raise UnsupportedAlgorithm( + "RSA with PKCS1 v1.5 padding is not supported by this version" + " of OpenSSL.", + _Reasons.UNSUPPORTED_PADDING, + ) + self._data = _data + self._recipients = _recipients if _recipients is not None else [] + self._content_encryption_algorithm = _content_encryption_algorithm + + def set_data(self, data: bytes) -> PKCS7EnvelopeBuilder: + _check_byteslike("data", data) + if self._data is not None: + raise ValueError("data may only be set once") + + return PKCS7EnvelopeBuilder( + _data=data, + _recipients=self._recipients, + _content_encryption_algorithm=self._content_encryption_algorithm, + ) + + def add_recipient( + self, + certificate: x509.Certificate, + ) -> PKCS7EnvelopeBuilder: + if not isinstance(certificate, x509.Certificate): + raise TypeError("certificate must be a x509.Certificate") + + if not isinstance(certificate.public_key(), rsa.RSAPublicKey): + raise TypeError("Only RSA keys are supported at this time.") + + return PKCS7EnvelopeBuilder( + _data=self._data, + _recipients=[ + *self._recipients, + certificate, + ], + _content_encryption_algorithm=self._content_encryption_algorithm, + ) + + def set_content_encryption_algorithm( + self, content_encryption_algorithm: ContentEncryptionAlgorithm + ) -> PKCS7EnvelopeBuilder: + if self._content_encryption_algorithm is not None: + raise ValueError("Content encryption algo may only be set once") + if content_encryption_algorithm not in { + algorithms.AES128, + algorithms.AES256, + }: + raise TypeError("Only AES128 and AES256 are supported") + + return PKCS7EnvelopeBuilder( + _data=self._data, + _recipients=self._recipients, + _content_encryption_algorithm=content_encryption_algorithm, 
+ ) + + def encrypt( + self, + encoding: serialization.Encoding, + options: Iterable[PKCS7Options], + ) -> bytes: + if len(self._recipients) == 0: + raise ValueError("Must have at least one recipient") + if self._data is None: + raise ValueError("You must add data to encrypt") + + # The default content encryption algorithm is AES-128, which the S/MIME + # v3.2 RFC specifies as MUST support (https://datatracker.ietf.org/doc/html/rfc5751#section-2.7) + content_encryption_algorithm = ( + self._content_encryption_algorithm or algorithms.AES128 + ) + + options = list(options) + if not all(isinstance(x, PKCS7Options) for x in options): + raise ValueError("options must be from the PKCS7Options enum") + if encoding not in ( + serialization.Encoding.PEM, + serialization.Encoding.DER, + serialization.Encoding.SMIME, + ): + raise ValueError( + "Must be PEM, DER, or SMIME from the Encoding enum" + ) + + # Only allow options that make sense for encryption + if any( + opt not in [PKCS7Options.Text, PKCS7Options.Binary] + for opt in options + ): + raise ValueError( + "Only the following options are supported for encryption: " + "Text, Binary" + ) + elif PKCS7Options.Text in options and PKCS7Options.Binary in options: + # OpenSSL accepts both options at the same time, but ignores Text. + # We fail defensively to avoid unexpected outputs. + raise ValueError( + "Cannot use Binary and Text options at the same time" + ) + + return rust_pkcs7.encrypt_and_serialize( + self, content_encryption_algorithm, encoding, options + ) + + +pkcs7_decrypt_der = rust_pkcs7.decrypt_der +pkcs7_decrypt_pem = rust_pkcs7.decrypt_pem +pkcs7_decrypt_smime = rust_pkcs7.decrypt_smime + + +def _smime_signed_encode( + data: bytes, signature: bytes, micalg: str, text_mode: bool +) -> bytes: + # This function works pretty hard to replicate what OpenSSL does + # precisely. For good and for ill. 
+ + m = email.message.Message() + m.add_header("MIME-Version", "1.0") + m.add_header( + "Content-Type", + "multipart/signed", + protocol="application/x-pkcs7-signature", + micalg=micalg, + ) + + m.preamble = "This is an S/MIME signed message\n" + + msg_part = OpenSSLMimePart() + msg_part.set_payload(data) + if text_mode: + msg_part.add_header("Content-Type", "text/plain") + m.attach(msg_part) + + sig_part = email.message.MIMEPart() + sig_part.add_header( + "Content-Type", "application/x-pkcs7-signature", name="smime.p7s" + ) + sig_part.add_header("Content-Transfer-Encoding", "base64") + sig_part.add_header( + "Content-Disposition", "attachment", filename="smime.p7s" + ) + sig_part.set_payload( + email.base64mime.body_encode(signature, maxlinelen=65) + ) + del sig_part["MIME-Version"] + m.attach(sig_part) + + fp = io.BytesIO() + g = email.generator.BytesGenerator( + fp, + maxheaderlen=0, + mangle_from_=False, + policy=m.policy.clone(linesep="\r\n"), + ) + g.flatten(m) + return fp.getvalue() + + +def _smime_enveloped_encode(data: bytes) -> bytes: + m = email.message.Message() + m.add_header("MIME-Version", "1.0") + m.add_header("Content-Disposition", "attachment", filename="smime.p7m") + m.add_header( + "Content-Type", + "application/pkcs7-mime", + smime_type="enveloped-data", + name="smime.p7m", + ) + m.add_header("Content-Transfer-Encoding", "base64") + + m.set_payload(email.base64mime.body_encode(data, maxlinelen=65)) + + return m.as_bytes(policy=m.policy.clone(linesep="\n", max_line_length=0)) + + +def _smime_enveloped_decode(data: bytes) -> bytes: + m = email.message_from_bytes(data) + if m.get_content_type() not in { + "application/x-pkcs7-mime", + "application/pkcs7-mime", + }: + raise ValueError("Not an S/MIME enveloped message") + return bytes(m.get_payload(decode=True)) + + +def _smime_remove_text_headers(data: bytes) -> bytes: + m = email.message_from_bytes(data) + # Using get() instead of get_content_type() since it has None as default, + # where the latter has "text/plain". Both methods are case-insensitive. + content_type = m.get("content-type") + if content_type is None: + raise ValueError( + "Decrypted MIME data has no 'Content-Type' header. " + "Please remove the 'Text' option to parse it manually." + ) + if "text/plain" not in content_type: + raise ValueError( + f"Decrypted MIME data content type is '{content_type}', not " + "'text/plain'. Remove the 'Text' option to parse it manually." + ) + return bytes(m.get_payload(decode=True)) + + +class OpenSSLMimePart(email.message.MIMEPart): + # A MIMEPart subclass that replicates OpenSSL's behavior of not including + # a newline if there are no headers. + def _write_headers(self, generator) -> None: + if list(self.raw_items()): + generator._write_headers(self) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/ssh.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/ssh.py new file mode 100644 index 00000000..cb10cf89 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/ssh.py @@ -0,0 +1,1619 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
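+
+# A rough sketch of the "openssh-key-v1" container handled by this module
+# (field order as parsed by load_ssh_private_key below; illustrative, not
+# normative):
+#
+#   b"openssh-key-v1\0"     magic
+#   string ciphername       e.g. b"none" or b"aes256-ctr"
+#   string kdfname          b"none" or b"bcrypt"
+#   string kdfoptions       for bcrypt: string salt, uint32 rounds
+#   uint32 nkeys            must be 1 here
+#   string public key blob
+#   string private section  encrypted when a cipher/KDF is named, with a
+#                           trailing AEAD tag for aes256-gcm@openssh.com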
+ +from __future__ import annotations + +import binascii +import enum +import os +import re +import typing +import warnings +from base64 import encodebytes as _base64_encode +from dataclasses import dataclass + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ( + dsa, + ec, + ed25519, + padding, + rsa, +) +from cryptography.hazmat.primitives.asymmetric import utils as asym_utils +from cryptography.hazmat.primitives.ciphers import ( + AEADDecryptionContext, + Cipher, + algorithms, + modes, +) +from cryptography.hazmat.primitives.serialization import ( + Encoding, + KeySerializationEncryption, + NoEncryption, + PrivateFormat, + PublicFormat, + _KeySerializationEncryption, +) + +try: + from bcrypt import kdf as _bcrypt_kdf + + _bcrypt_supported = True +except ImportError: + _bcrypt_supported = False + + def _bcrypt_kdf( + password: bytes, + salt: bytes, + desired_key_bytes: int, + rounds: int, + ignore_few_rounds: bool = False, + ) -> bytes: + raise UnsupportedAlgorithm("Need bcrypt module") + + +_SSH_ED25519 = b"ssh-ed25519" +_SSH_RSA = b"ssh-rsa" +_SSH_DSA = b"ssh-dss" +_ECDSA_NISTP256 = b"ecdsa-sha2-nistp256" +_ECDSA_NISTP384 = b"ecdsa-sha2-nistp384" +_ECDSA_NISTP521 = b"ecdsa-sha2-nistp521" +_CERT_SUFFIX = b"-cert-v01@openssh.com" + +# U2F application string suffixed pubkey +_SK_SSH_ED25519 = b"sk-ssh-ed25519@openssh.com" +_SK_SSH_ECDSA_NISTP256 = b"sk-ecdsa-sha2-nistp256@openssh.com" + +# These are not key types, only algorithms, so they cannot appear +# as a public key type +_SSH_RSA_SHA256 = b"rsa-sha2-256" +_SSH_RSA_SHA512 = b"rsa-sha2-512" + +_SSH_PUBKEY_RC = re.compile(rb"\A(\S+)[ \t]+(\S+)") +_SK_MAGIC = b"openssh-key-v1\0" +_SK_START = b"-----BEGIN OPENSSH PRIVATE KEY-----" +_SK_END = b"-----END OPENSSH PRIVATE KEY-----" +_BCRYPT = b"bcrypt" +_NONE = b"none" +_DEFAULT_CIPHER = b"aes256-ctr" +_DEFAULT_ROUNDS = 16 + +# re is only way to work on bytes-like data +_PEM_RC = re.compile(_SK_START + b"(.*?)" + _SK_END, re.DOTALL) + +# padding for max blocksize +_PADDING = memoryview(bytearray(range(1, 1 + 16))) + + +@dataclass +class _SSHCipher: + alg: type[algorithms.AES] + key_len: int + mode: type[modes.CTR] | type[modes.CBC] | type[modes.GCM] + block_len: int + iv_len: int + tag_len: int | None + is_aead: bool + + +# ciphers that are actually used in key wrapping +_SSH_CIPHERS: dict[bytes, _SSHCipher] = { + b"aes256-ctr": _SSHCipher( + alg=algorithms.AES, + key_len=32, + mode=modes.CTR, + block_len=16, + iv_len=16, + tag_len=None, + is_aead=False, + ), + b"aes256-cbc": _SSHCipher( + alg=algorithms.AES, + key_len=32, + mode=modes.CBC, + block_len=16, + iv_len=16, + tag_len=None, + is_aead=False, + ), + b"aes256-gcm@openssh.com": _SSHCipher( + alg=algorithms.AES, + key_len=32, + mode=modes.GCM, + block_len=16, + iv_len=12, + tag_len=16, + is_aead=True, + ), +} + +# map local curve name to key type +_ECDSA_KEY_TYPE = { + "secp256r1": _ECDSA_NISTP256, + "secp384r1": _ECDSA_NISTP384, + "secp521r1": _ECDSA_NISTP521, +} + + +def _get_ssh_key_type(key: SSHPrivateKeyTypes | SSHPublicKeyTypes) -> bytes: + if isinstance(key, ec.EllipticCurvePrivateKey): + key_type = _ecdsa_key_type(key.public_key()) + elif isinstance(key, ec.EllipticCurvePublicKey): + key_type = _ecdsa_key_type(key) + elif isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)): + key_type = _SSH_RSA + elif isinstance(key, (dsa.DSAPrivateKey, dsa.DSAPublicKey)): + key_type = _SSH_DSA + 
elif isinstance( + key, (ed25519.Ed25519PrivateKey, ed25519.Ed25519PublicKey) + ): + key_type = _SSH_ED25519 + else: + raise ValueError("Unsupported key type") + + return key_type + + +def _ecdsa_key_type(public_key: ec.EllipticCurvePublicKey) -> bytes: + """Return SSH key_type and curve_name for private key.""" + curve = public_key.curve + if curve.name not in _ECDSA_KEY_TYPE: + raise ValueError( + f"Unsupported curve for ssh private key: {curve.name!r}" + ) + return _ECDSA_KEY_TYPE[curve.name] + + +def _ssh_pem_encode( + data: utils.Buffer, + prefix: bytes = _SK_START + b"\n", + suffix: bytes = _SK_END + b"\n", +) -> bytes: + return b"".join([prefix, _base64_encode(data), suffix]) + + +def _check_block_size(data: utils.Buffer, block_len: int) -> None: + """Require data to be full blocks""" + if not data or len(data) % block_len != 0: + raise ValueError("Corrupt data: missing padding") + + +def _check_empty(data: utils.Buffer) -> None: + """All data should have been parsed.""" + if data: + raise ValueError("Corrupt data: unparsed data") + + +def _init_cipher( + ciphername: bytes, + password: bytes | None, + salt: bytes, + rounds: int, +) -> Cipher[modes.CBC | modes.CTR | modes.GCM]: + """Generate key + iv and return cipher.""" + if not password: + raise TypeError( + "Key is password-protected, but password was not provided." + ) + + ciph = _SSH_CIPHERS[ciphername] + seed = _bcrypt_kdf( + password, salt, ciph.key_len + ciph.iv_len, rounds, True + ) + return Cipher( + ciph.alg(seed[: ciph.key_len]), + ciph.mode(seed[ciph.key_len :]), + ) + + +def _get_u32(data: memoryview) -> tuple[int, memoryview]: + """Uint32""" + if len(data) < 4: + raise ValueError("Invalid data") + return int.from_bytes(data[:4], byteorder="big"), data[4:] + + +def _get_u64(data: memoryview) -> tuple[int, memoryview]: + """Uint64""" + if len(data) < 8: + raise ValueError("Invalid data") + return int.from_bytes(data[:8], byteorder="big"), data[8:] + + +def _get_sshstr(data: memoryview) -> tuple[memoryview, memoryview]: + """Bytes with u32 length prefix""" + n, data = _get_u32(data) + if n > len(data): + raise ValueError("Invalid data") + return data[:n], data[n:] + + +def _get_mpint(data: memoryview) -> tuple[int, memoryview]: + """Big integer.""" + val, data = _get_sshstr(data) + if val and val[0] > 0x7F: + raise ValueError("Invalid data") + return int.from_bytes(val, "big"), data + + +def _to_mpint(val: int) -> bytes: + """Storage format for signed bigint.""" + if val < 0: + raise ValueError("negative mpint not allowed") + if not val: + return b"" + nbytes = (val.bit_length() + 8) // 8 + return utils.int_to_bytes(val, nbytes) + + +class _FragList: + """Build recursive structure without data copy.""" + + flist: list[utils.Buffer] + + def __init__(self, init: list[utils.Buffer] | None = None) -> None: + self.flist = [] + if init: + self.flist.extend(init) + + def put_raw(self, val: utils.Buffer) -> None: + """Add plain bytes""" + self.flist.append(val) + + def put_u32(self, val: int) -> None: + """Big-endian uint32""" + self.flist.append(val.to_bytes(length=4, byteorder="big")) + + def put_u64(self, val: int) -> None: + """Big-endian uint64""" + self.flist.append(val.to_bytes(length=8, byteorder="big")) + + def put_sshstr(self, val: bytes | _FragList) -> None: + """Bytes prefixed with u32 length""" + if isinstance(val, (bytes, memoryview, bytearray)): + self.put_u32(len(val)) + self.flist.append(val) + else: + self.put_u32(val.size()) + self.flist.extend(val.flist) + + def put_mpint(self, val: int) -> None: + 
"""Big-endian bigint prefixed with u32 length""" + self.put_sshstr(_to_mpint(val)) + + def size(self) -> int: + """Current number of bytes""" + return sum(map(len, self.flist)) + + def render(self, dstbuf: memoryview, pos: int = 0) -> int: + """Write into bytearray""" + for frag in self.flist: + flen = len(frag) + start, pos = pos, pos + flen + dstbuf[start:pos] = frag + return pos + + def tobytes(self) -> bytes: + """Return as bytes""" + buf = memoryview(bytearray(self.size())) + self.render(buf) + return buf.tobytes() + + +class _SSHFormatRSA: + """Format for RSA keys. + + Public: + mpint e, n + Private: + mpint n, e, d, iqmp, p, q + """ + + def get_public( + self, data: memoryview + ) -> tuple[tuple[int, int], memoryview]: + """RSA public fields""" + e, data = _get_mpint(data) + n, data = _get_mpint(data) + return (e, n), data + + def load_public( + self, data: memoryview + ) -> tuple[rsa.RSAPublicKey, memoryview]: + """Make RSA public key from data.""" + (e, n), data = self.get_public(data) + public_numbers = rsa.RSAPublicNumbers(e, n) + public_key = public_numbers.public_key() + return public_key, data + + def load_private( + self, data: memoryview, pubfields, unsafe_skip_rsa_key_validation: bool + ) -> tuple[rsa.RSAPrivateKey, memoryview]: + """Make RSA private key from data.""" + n, data = _get_mpint(data) + e, data = _get_mpint(data) + d, data = _get_mpint(data) + iqmp, data = _get_mpint(data) + p, data = _get_mpint(data) + q, data = _get_mpint(data) + + if (e, n) != pubfields: + raise ValueError("Corrupt data: rsa field mismatch") + dmp1 = rsa.rsa_crt_dmp1(d, p) + dmq1 = rsa.rsa_crt_dmq1(d, q) + public_numbers = rsa.RSAPublicNumbers(e, n) + private_numbers = rsa.RSAPrivateNumbers( + p, q, d, dmp1, dmq1, iqmp, public_numbers + ) + private_key = private_numbers.private_key( + unsafe_skip_rsa_key_validation=unsafe_skip_rsa_key_validation + ) + return private_key, data + + def encode_public( + self, public_key: rsa.RSAPublicKey, f_pub: _FragList + ) -> None: + """Write RSA public key""" + pubn = public_key.public_numbers() + f_pub.put_mpint(pubn.e) + f_pub.put_mpint(pubn.n) + + def encode_private( + self, private_key: rsa.RSAPrivateKey, f_priv: _FragList + ) -> None: + """Write RSA private key""" + private_numbers = private_key.private_numbers() + public_numbers = private_numbers.public_numbers + + f_priv.put_mpint(public_numbers.n) + f_priv.put_mpint(public_numbers.e) + + f_priv.put_mpint(private_numbers.d) + f_priv.put_mpint(private_numbers.iqmp) + f_priv.put_mpint(private_numbers.p) + f_priv.put_mpint(private_numbers.q) + + +class _SSHFormatDSA: + """Format for DSA keys. 
+ + Public: + mpint p, q, g, y + Private: + mpint p, q, g, y, x + """ + + def get_public(self, data: memoryview) -> tuple[tuple, memoryview]: + """DSA public fields""" + p, data = _get_mpint(data) + q, data = _get_mpint(data) + g, data = _get_mpint(data) + y, data = _get_mpint(data) + return (p, q, g, y), data + + def load_public( + self, data: memoryview + ) -> tuple[dsa.DSAPublicKey, memoryview]: + """Make DSA public key from data.""" + (p, q, g, y), data = self.get_public(data) + parameter_numbers = dsa.DSAParameterNumbers(p, q, g) + public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers) + self._validate(public_numbers) + public_key = public_numbers.public_key() + return public_key, data + + def load_private( + self, data: memoryview, pubfields, unsafe_skip_rsa_key_validation: bool + ) -> tuple[dsa.DSAPrivateKey, memoryview]: + """Make DSA private key from data.""" + (p, q, g, y), data = self.get_public(data) + x, data = _get_mpint(data) + + if (p, q, g, y) != pubfields: + raise ValueError("Corrupt data: dsa field mismatch") + parameter_numbers = dsa.DSAParameterNumbers(p, q, g) + public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers) + self._validate(public_numbers) + private_numbers = dsa.DSAPrivateNumbers(x, public_numbers) + private_key = private_numbers.private_key() + return private_key, data + + def encode_public( + self, public_key: dsa.DSAPublicKey, f_pub: _FragList + ) -> None: + """Write DSA public key""" + public_numbers = public_key.public_numbers() + parameter_numbers = public_numbers.parameter_numbers + self._validate(public_numbers) + + f_pub.put_mpint(parameter_numbers.p) + f_pub.put_mpint(parameter_numbers.q) + f_pub.put_mpint(parameter_numbers.g) + f_pub.put_mpint(public_numbers.y) + + def encode_private( + self, private_key: dsa.DSAPrivateKey, f_priv: _FragList + ) -> None: + """Write DSA private key""" + self.encode_public(private_key.public_key(), f_priv) + f_priv.put_mpint(private_key.private_numbers().x) + + def _validate(self, public_numbers: dsa.DSAPublicNumbers) -> None: + parameter_numbers = public_numbers.parameter_numbers + if parameter_numbers.p.bit_length() != 1024: + raise ValueError("SSH supports only 1024 bit DSA keys") + + +class _SSHFormatECDSA: + """Format for ECDSA keys. 
+ + Public: + str curve + bytes point + Private: + str curve + bytes point + mpint secret + """ + + def __init__(self, ssh_curve_name: bytes, curve: ec.EllipticCurve): + self.ssh_curve_name = ssh_curve_name + self.curve = curve + + def get_public( + self, data: memoryview + ) -> tuple[tuple[memoryview, memoryview], memoryview]: + """ECDSA public fields""" + curve, data = _get_sshstr(data) + point, data = _get_sshstr(data) + if curve != self.ssh_curve_name: + raise ValueError("Curve name mismatch") + if point[0] != 4: + raise NotImplementedError("Need uncompressed point") + return (curve, point), data + + def load_public( + self, data: memoryview + ) -> tuple[ec.EllipticCurvePublicKey, memoryview]: + """Make ECDSA public key from data.""" + (_, point), data = self.get_public(data) + public_key = ec.EllipticCurvePublicKey.from_encoded_point( + self.curve, point.tobytes() + ) + return public_key, data + + def load_private( + self, data: memoryview, pubfields, unsafe_skip_rsa_key_validation: bool + ) -> tuple[ec.EllipticCurvePrivateKey, memoryview]: + """Make ECDSA private key from data.""" + (curve_name, point), data = self.get_public(data) + secret, data = _get_mpint(data) + + if (curve_name, point) != pubfields: + raise ValueError("Corrupt data: ecdsa field mismatch") + private_key = ec.derive_private_key(secret, self.curve) + return private_key, data + + def encode_public( + self, public_key: ec.EllipticCurvePublicKey, f_pub: _FragList + ) -> None: + """Write ECDSA public key""" + point = public_key.public_bytes( + Encoding.X962, PublicFormat.UncompressedPoint + ) + f_pub.put_sshstr(self.ssh_curve_name) + f_pub.put_sshstr(point) + + def encode_private( + self, private_key: ec.EllipticCurvePrivateKey, f_priv: _FragList + ) -> None: + """Write ECDSA private key""" + public_key = private_key.public_key() + private_numbers = private_key.private_numbers() + + self.encode_public(public_key, f_priv) + f_priv.put_mpint(private_numbers.private_value) + + +class _SSHFormatEd25519: + """Format for Ed25519 keys. 
+ + Public: + bytes point + Private: + bytes point + bytes secret_and_point + """ + + def get_public( + self, data: memoryview + ) -> tuple[tuple[memoryview], memoryview]: + """Ed25519 public fields""" + point, data = _get_sshstr(data) + return (point,), data + + def load_public( + self, data: memoryview + ) -> tuple[ed25519.Ed25519PublicKey, memoryview]: + """Make Ed25519 public key from data.""" + (point,), data = self.get_public(data) + public_key = ed25519.Ed25519PublicKey.from_public_bytes( + point.tobytes() + ) + return public_key, data + + def load_private( + self, data: memoryview, pubfields, unsafe_skip_rsa_key_validation: bool + ) -> tuple[ed25519.Ed25519PrivateKey, memoryview]: + """Make Ed25519 private key from data.""" + (point,), data = self.get_public(data) + keypair, data = _get_sshstr(data) + + secret = keypair[:32] + point2 = keypair[32:] + if point != point2 or (point,) != pubfields: + raise ValueError("Corrupt data: ed25519 field mismatch") + private_key = ed25519.Ed25519PrivateKey.from_private_bytes(secret) + return private_key, data + + def encode_public( + self, public_key: ed25519.Ed25519PublicKey, f_pub: _FragList + ) -> None: + """Write Ed25519 public key""" + raw_public_key = public_key.public_bytes( + Encoding.Raw, PublicFormat.Raw + ) + f_pub.put_sshstr(raw_public_key) + + def encode_private( + self, private_key: ed25519.Ed25519PrivateKey, f_priv: _FragList + ) -> None: + """Write Ed25519 private key""" + public_key = private_key.public_key() + raw_private_key = private_key.private_bytes( + Encoding.Raw, PrivateFormat.Raw, NoEncryption() + ) + raw_public_key = public_key.public_bytes( + Encoding.Raw, PublicFormat.Raw + ) + f_keypair = _FragList([raw_private_key, raw_public_key]) + + self.encode_public(public_key, f_priv) + f_priv.put_sshstr(f_keypair) + + +def load_application(data) -> tuple[memoryview, memoryview]: + """ + U2F application strings + """ + application, data = _get_sshstr(data) + if not application.tobytes().startswith(b"ssh:"): + raise ValueError( + "U2F application string does not start with b'ssh:' " + f"({application})" + ) + return application, data + + +class _SSHFormatSKEd25519: + """ + The format of a sk-ssh-ed25519@openssh.com public key is: + + string "sk-ssh-ed25519@openssh.com" + string public key + string application (user-specified, but typically "ssh:") + """ + + def load_public( + self, data: memoryview + ) -> tuple[ed25519.Ed25519PublicKey, memoryview]: + """Make Ed25519 public key from data.""" + public_key, data = _lookup_kformat(_SSH_ED25519).load_public(data) + _, data = load_application(data) + return public_key, data + + def get_public(self, data: memoryview) -> typing.NoReturn: + # Confusingly `get_public` is an entry point used by private key + # loading. + raise UnsupportedAlgorithm( + "sk-ssh-ed25519 private keys cannot be loaded" + ) + + +class _SSHFormatSKECDSA: + """ + The format of a sk-ecdsa-sha2-nistp256@openssh.com public key is: + + string "sk-ecdsa-sha2-nistp256@openssh.com" + string curve name + ec_point Q + string application (user-specified, but typically "ssh:") + """ + + def load_public( + self, data: memoryview + ) -> tuple[ec.EllipticCurvePublicKey, memoryview]: + """Make ECDSA public key from data.""" + public_key, data = _lookup_kformat(_ECDSA_NISTP256).load_public(data) + _, data = load_application(data) + return public_key, data + + def get_public(self, data: memoryview) -> typing.NoReturn: + # Confusingly `get_public` is an entry point used by private key + # loading. 
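+        # The private half of a security-key (sk-) credential typically
+        # never leaves the authenticator; the on-disk "private key" holds a
+        # key handle instead, so there is no private scalar to load.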
+ raise UnsupportedAlgorithm( + "sk-ecdsa-sha2-nistp256 private keys cannot be loaded" + ) + + +_KEY_FORMATS = { + _SSH_RSA: _SSHFormatRSA(), + _SSH_DSA: _SSHFormatDSA(), + _SSH_ED25519: _SSHFormatEd25519(), + _ECDSA_NISTP256: _SSHFormatECDSA(b"nistp256", ec.SECP256R1()), + _ECDSA_NISTP384: _SSHFormatECDSA(b"nistp384", ec.SECP384R1()), + _ECDSA_NISTP521: _SSHFormatECDSA(b"nistp521", ec.SECP521R1()), + _SK_SSH_ED25519: _SSHFormatSKEd25519(), + _SK_SSH_ECDSA_NISTP256: _SSHFormatSKECDSA(), +} + + +def _lookup_kformat(key_type: utils.Buffer): + """Return valid format or throw error""" + if not isinstance(key_type, bytes): + key_type = memoryview(key_type).tobytes() + if key_type in _KEY_FORMATS: + return _KEY_FORMATS[key_type] + raise UnsupportedAlgorithm(f"Unsupported key type: {key_type!r}") + + +SSHPrivateKeyTypes = typing.Union[ + ec.EllipticCurvePrivateKey, + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ed25519.Ed25519PrivateKey, +] + + +def load_ssh_private_key( + data: utils.Buffer, + password: bytes | None, + backend: typing.Any = None, + *, + unsafe_skip_rsa_key_validation: bool = False, +) -> SSHPrivateKeyTypes: + """Load private key from OpenSSH custom encoding.""" + utils._check_byteslike("data", data) + if password is not None: + utils._check_bytes("password", password) + + m = _PEM_RC.search(data) + if not m: + raise ValueError("Not OpenSSH private key format") + p1 = m.start(1) + p2 = m.end(1) + data = binascii.a2b_base64(memoryview(data)[p1:p2]) + if not data.startswith(_SK_MAGIC): + raise ValueError("Not OpenSSH private key format") + data = memoryview(data)[len(_SK_MAGIC) :] + + # parse header + ciphername, data = _get_sshstr(data) + kdfname, data = _get_sshstr(data) + kdfoptions, data = _get_sshstr(data) + nkeys, data = _get_u32(data) + if nkeys != 1: + raise ValueError("Only one key supported") + + # load public key data + pubdata, data = _get_sshstr(data) + pub_key_type, pubdata = _get_sshstr(pubdata) + kformat = _lookup_kformat(pub_key_type) + pubfields, pubdata = kformat.get_public(pubdata) + _check_empty(pubdata) + + if ciphername != _NONE or kdfname != _NONE: + ciphername_bytes = ciphername.tobytes() + if ciphername_bytes not in _SSH_CIPHERS: + raise UnsupportedAlgorithm( + f"Unsupported cipher: {ciphername_bytes!r}" + ) + if kdfname != _BCRYPT: + raise UnsupportedAlgorithm(f"Unsupported KDF: {kdfname!r}") + blklen = _SSH_CIPHERS[ciphername_bytes].block_len + tag_len = _SSH_CIPHERS[ciphername_bytes].tag_len + # load secret data + edata, data = _get_sshstr(data) + # see https://bugzilla.mindrot.org/show_bug.cgi?id=3553 for + # information about how OpenSSH handles AEAD tags + if _SSH_CIPHERS[ciphername_bytes].is_aead: + tag = bytes(data) + if len(tag) != tag_len: + raise ValueError("Corrupt data: invalid tag length for cipher") + else: + _check_empty(data) + _check_block_size(edata, blklen) + salt, kbuf = _get_sshstr(kdfoptions) + rounds, kbuf = _get_u32(kbuf) + _check_empty(kbuf) + ciph = _init_cipher(ciphername_bytes, password, salt.tobytes(), rounds) + dec = ciph.decryptor() + edata = memoryview(dec.update(edata)) + if _SSH_CIPHERS[ciphername_bytes].is_aead: + assert isinstance(dec, AEADDecryptionContext) + _check_empty(dec.finalize_with_tag(tag)) + else: + # _check_block_size requires data to be a full block so there + # should be no output from finalize + _check_empty(dec.finalize()) + else: + if password: + raise TypeError( + "Password was given but private key is not encrypted." 
+ ) + # load secret data + edata, data = _get_sshstr(data) + _check_empty(data) + blklen = 8 + _check_block_size(edata, blklen) + ck1, edata = _get_u32(edata) + ck2, edata = _get_u32(edata) + if ck1 != ck2: + raise ValueError("Corrupt data: broken checksum") + + # load per-key struct + key_type, edata = _get_sshstr(edata) + if key_type != pub_key_type: + raise ValueError("Corrupt data: key type mismatch") + private_key, edata = kformat.load_private( + edata, + pubfields, + unsafe_skip_rsa_key_validation=unsafe_skip_rsa_key_validation, + ) + # We don't use the comment + _, edata = _get_sshstr(edata) + + # yes, SSH does padding check *after* all other parsing is done. + # need to follow as it writes zero-byte padding too. + if edata != _PADDING[: len(edata)]: + raise ValueError("Corrupt data: invalid padding") + + if isinstance(private_key, dsa.DSAPrivateKey): + warnings.warn( + "SSH DSA keys are deprecated and will be removed in a future " + "release.", + utils.DeprecatedIn40, + stacklevel=2, + ) + + return private_key + + +def _serialize_ssh_private_key( + private_key: SSHPrivateKeyTypes, + password: bytes, + encryption_algorithm: KeySerializationEncryption, +) -> bytes: + """Serialize private key with OpenSSH custom encoding.""" + utils._check_bytes("password", password) + if isinstance(private_key, dsa.DSAPrivateKey): + warnings.warn( + "SSH DSA key support is deprecated and will be " + "removed in a future release", + utils.DeprecatedIn40, + stacklevel=4, + ) + + key_type = _get_ssh_key_type(private_key) + kformat = _lookup_kformat(key_type) + + # setup parameters + f_kdfoptions = _FragList() + if password: + ciphername = _DEFAULT_CIPHER + blklen = _SSH_CIPHERS[ciphername].block_len + kdfname = _BCRYPT + rounds = _DEFAULT_ROUNDS + if ( + isinstance(encryption_algorithm, _KeySerializationEncryption) + and encryption_algorithm._kdf_rounds is not None + ): + rounds = encryption_algorithm._kdf_rounds + salt = os.urandom(16) + f_kdfoptions.put_sshstr(salt) + f_kdfoptions.put_u32(rounds) + ciph = _init_cipher(ciphername, password, salt, rounds) + else: + ciphername = kdfname = _NONE + blklen = 8 + ciph = None + nkeys = 1 + checkval = os.urandom(4) + comment = b"" + + # encode public and private parts together + f_public_key = _FragList() + f_public_key.put_sshstr(key_type) + kformat.encode_public(private_key.public_key(), f_public_key) + + f_secrets = _FragList([checkval, checkval]) + f_secrets.put_sshstr(key_type) + kformat.encode_private(private_key, f_secrets) + f_secrets.put_sshstr(comment) + f_secrets.put_raw(_PADDING[: blklen - (f_secrets.size() % blklen)]) + + # top-level structure + f_main = _FragList() + f_main.put_raw(_SK_MAGIC) + f_main.put_sshstr(ciphername) + f_main.put_sshstr(kdfname) + f_main.put_sshstr(f_kdfoptions) + f_main.put_u32(nkeys) + f_main.put_sshstr(f_public_key) + f_main.put_sshstr(f_secrets) + + # copy result info bytearray + slen = f_secrets.size() + mlen = f_main.size() + buf = memoryview(bytearray(mlen + blklen)) + f_main.render(buf) + ofs = mlen - slen + + # encrypt in-place + if ciph is not None: + ciph.encryptor().update_into(buf[ofs:mlen], buf[ofs:]) + + return _ssh_pem_encode(buf[:mlen]) + + +SSHPublicKeyTypes = typing.Union[ + ec.EllipticCurvePublicKey, + rsa.RSAPublicKey, + dsa.DSAPublicKey, + ed25519.Ed25519PublicKey, +] + +SSHCertPublicKeyTypes = typing.Union[ + ec.EllipticCurvePublicKey, + rsa.RSAPublicKey, + ed25519.Ed25519PublicKey, +] + + +class SSHCertificateType(enum.Enum): + USER = 1 + HOST = 2 + + +class SSHCertificate: + def __init__( + self, + 
_nonce: memoryview, + _public_key: SSHPublicKeyTypes, + _serial: int, + _cctype: int, + _key_id: memoryview, + _valid_principals: list[bytes], + _valid_after: int, + _valid_before: int, + _critical_options: dict[bytes, bytes], + _extensions: dict[bytes, bytes], + _sig_type: memoryview, + _sig_key: memoryview, + _inner_sig_type: memoryview, + _signature: memoryview, + _tbs_cert_body: memoryview, + _cert_key_type: bytes, + _cert_body: memoryview, + ): + self._nonce = _nonce + self._public_key = _public_key + self._serial = _serial + try: + self._type = SSHCertificateType(_cctype) + except ValueError: + raise ValueError("Invalid certificate type") + self._key_id = _key_id + self._valid_principals = _valid_principals + self._valid_after = _valid_after + self._valid_before = _valid_before + self._critical_options = _critical_options + self._extensions = _extensions + self._sig_type = _sig_type + self._sig_key = _sig_key + self._inner_sig_type = _inner_sig_type + self._signature = _signature + self._cert_key_type = _cert_key_type + self._cert_body = _cert_body + self._tbs_cert_body = _tbs_cert_body + + @property + def nonce(self) -> bytes: + return bytes(self._nonce) + + def public_key(self) -> SSHCertPublicKeyTypes: + # make mypy happy until we remove DSA support entirely and + # the underlying union won't have a disallowed type + return typing.cast(SSHCertPublicKeyTypes, self._public_key) + + @property + def serial(self) -> int: + return self._serial + + @property + def type(self) -> SSHCertificateType: + return self._type + + @property + def key_id(self) -> bytes: + return bytes(self._key_id) + + @property + def valid_principals(self) -> list[bytes]: + return self._valid_principals + + @property + def valid_before(self) -> int: + return self._valid_before + + @property + def valid_after(self) -> int: + return self._valid_after + + @property + def critical_options(self) -> dict[bytes, bytes]: + return self._critical_options + + @property + def extensions(self) -> dict[bytes, bytes]: + return self._extensions + + def signature_key(self) -> SSHCertPublicKeyTypes: + sigformat = _lookup_kformat(self._sig_type) + signature_key, sigkey_rest = sigformat.load_public(self._sig_key) + _check_empty(sigkey_rest) + return signature_key + + def public_bytes(self) -> bytes: + return ( + bytes(self._cert_key_type) + + b" " + + binascii.b2a_base64(bytes(self._cert_body), newline=False) + ) + + def verify_cert_signature(self) -> None: + signature_key = self.signature_key() + if isinstance(signature_key, ed25519.Ed25519PublicKey): + signature_key.verify( + bytes(self._signature), bytes(self._tbs_cert_body) + ) + elif isinstance(signature_key, ec.EllipticCurvePublicKey): + # The signature is encoded as a pair of big-endian integers + r, data = _get_mpint(self._signature) + s, data = _get_mpint(data) + _check_empty(data) + computed_sig = asym_utils.encode_dss_signature(r, s) + hash_alg = _get_ec_hash_alg(signature_key.curve) + signature_key.verify( + computed_sig, bytes(self._tbs_cert_body), ec.ECDSA(hash_alg) + ) + else: + assert isinstance(signature_key, rsa.RSAPublicKey) + if self._inner_sig_type == _SSH_RSA: + hash_alg = hashes.SHA1() + elif self._inner_sig_type == _SSH_RSA_SHA256: + hash_alg = hashes.SHA256() + else: + assert self._inner_sig_type == _SSH_RSA_SHA512 + hash_alg = hashes.SHA512() + signature_key.verify( + bytes(self._signature), + bytes(self._tbs_cert_body), + padding.PKCS1v15(), + hash_alg, + ) + + +def _get_ec_hash_alg(curve: ec.EllipticCurve) -> hashes.HashAlgorithm: + if isinstance(curve, 
ec.SECP256R1): + return hashes.SHA256() + elif isinstance(curve, ec.SECP384R1): + return hashes.SHA384() + else: + assert isinstance(curve, ec.SECP521R1) + return hashes.SHA512() + + +def _load_ssh_public_identity( + data: utils.Buffer, + _legacy_dsa_allowed=False, +) -> SSHCertificate | SSHPublicKeyTypes: + utils._check_byteslike("data", data) + + m = _SSH_PUBKEY_RC.match(data) + if not m: + raise ValueError("Invalid line format") + key_type = orig_key_type = m.group(1) + key_body = m.group(2) + with_cert = False + if key_type.endswith(_CERT_SUFFIX): + with_cert = True + key_type = key_type[: -len(_CERT_SUFFIX)] + if key_type == _SSH_DSA and not _legacy_dsa_allowed: + raise UnsupportedAlgorithm( + "DSA keys aren't supported in SSH certificates" + ) + kformat = _lookup_kformat(key_type) + + try: + rest = memoryview(binascii.a2b_base64(key_body)) + except (TypeError, binascii.Error): + raise ValueError("Invalid format") + + if with_cert: + cert_body = rest + inner_key_type, rest = _get_sshstr(rest) + if inner_key_type != orig_key_type: + raise ValueError("Invalid key format") + if with_cert: + nonce, rest = _get_sshstr(rest) + public_key, rest = kformat.load_public(rest) + if with_cert: + serial, rest = _get_u64(rest) + cctype, rest = _get_u32(rest) + key_id, rest = _get_sshstr(rest) + principals, rest = _get_sshstr(rest) + valid_principals = [] + while principals: + principal, principals = _get_sshstr(principals) + valid_principals.append(bytes(principal)) + valid_after, rest = _get_u64(rest) + valid_before, rest = _get_u64(rest) + crit_options, rest = _get_sshstr(rest) + critical_options = _parse_exts_opts(crit_options) + exts, rest = _get_sshstr(rest) + extensions = _parse_exts_opts(exts) + # Get the reserved field, which is unused. + _, rest = _get_sshstr(rest) + sig_key_raw, rest = _get_sshstr(rest) + sig_type, sig_key = _get_sshstr(sig_key_raw) + if sig_type == _SSH_DSA and not _legacy_dsa_allowed: + raise UnsupportedAlgorithm( + "DSA signatures aren't supported in SSH certificates" + ) + # Get the entire cert body and subtract the signature + tbs_cert_body = cert_body[: -len(rest)] + signature_raw, rest = _get_sshstr(rest) + _check_empty(rest) + inner_sig_type, sig_rest = _get_sshstr(signature_raw) + # RSA certs can have multiple algorithm types + if ( + sig_type == _SSH_RSA + and inner_sig_type + not in [_SSH_RSA_SHA256, _SSH_RSA_SHA512, _SSH_RSA] + ) or (sig_type != _SSH_RSA and inner_sig_type != sig_type): + raise ValueError("Signature key type does not match") + signature, sig_rest = _get_sshstr(sig_rest) + _check_empty(sig_rest) + return SSHCertificate( + nonce, + public_key, + serial, + cctype, + key_id, + valid_principals, + valid_after, + valid_before, + critical_options, + extensions, + sig_type, + sig_key, + inner_sig_type, + signature, + tbs_cert_body, + orig_key_type, + cert_body, + ) + else: + _check_empty(rest) + return public_key + + +def load_ssh_public_identity( + data: utils.Buffer, +) -> SSHCertificate | SSHPublicKeyTypes: + return _load_ssh_public_identity(data) + + +def _parse_exts_opts(exts_opts: memoryview) -> dict[bytes, bytes]: + result: dict[bytes, bytes] = {} + last_name = None + while exts_opts: + name, exts_opts = _get_sshstr(exts_opts) + bname: bytes = bytes(name) + if bname in result: + raise ValueError("Duplicate name") + if last_name is not None and bname < last_name: + raise ValueError("Fields not lexically sorted") + value, exts_opts = _get_sshstr(exts_opts) + if len(value) > 0: + value, extra = _get_sshstr(value) + if len(extra) > 0: + raise 
ValueError("Unexpected extra data after value") + result[bname] = bytes(value) + last_name = bname + return result + + +def ssh_key_fingerprint( + key: SSHPublicKeyTypes, + hash_algorithm: hashes.MD5 | hashes.SHA256, +) -> bytes: + if not isinstance(hash_algorithm, (hashes.MD5, hashes.SHA256)): + raise TypeError("hash_algorithm must be either MD5 or SHA256") + + key_type = _get_ssh_key_type(key) + kformat = _lookup_kformat(key_type) + + f_pub = _FragList() + f_pub.put_sshstr(key_type) + kformat.encode_public(key, f_pub) + + ssh_binary_data = f_pub.tobytes() + + # Hash the binary data + hash_obj = hashes.Hash(hash_algorithm) + hash_obj.update(ssh_binary_data) + return hash_obj.finalize() + + +def load_ssh_public_key( + data: utils.Buffer, backend: typing.Any = None +) -> SSHPublicKeyTypes: + cert_or_key = _load_ssh_public_identity(data, _legacy_dsa_allowed=True) + public_key: SSHPublicKeyTypes + if isinstance(cert_or_key, SSHCertificate): + public_key = cert_or_key.public_key() + else: + public_key = cert_or_key + + if isinstance(public_key, dsa.DSAPublicKey): + warnings.warn( + "SSH DSA keys are deprecated and will be removed in a future " + "release.", + utils.DeprecatedIn40, + stacklevel=2, + ) + return public_key + + +def serialize_ssh_public_key(public_key: SSHPublicKeyTypes) -> bytes: + """One-line public key format for OpenSSH""" + if isinstance(public_key, dsa.DSAPublicKey): + warnings.warn( + "SSH DSA key support is deprecated and will be " + "removed in a future release", + utils.DeprecatedIn40, + stacklevel=4, + ) + key_type = _get_ssh_key_type(public_key) + kformat = _lookup_kformat(key_type) + + f_pub = _FragList() + f_pub.put_sshstr(key_type) + kformat.encode_public(public_key, f_pub) + + pub = binascii.b2a_base64(f_pub.tobytes()).strip() + return b"".join([key_type, b" ", pub]) + + +SSHCertPrivateKeyTypes = typing.Union[ + ec.EllipticCurvePrivateKey, + rsa.RSAPrivateKey, + ed25519.Ed25519PrivateKey, +] + + +# This is an undocumented limit enforced in the openssh codebase for sshd and +# ssh-keygen, but it is undefined in the ssh certificates spec. 
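+# Illustrative only: exceeding the limit fails fast in valid_principals()
+# rather than emitting a certificate sshd would refuse, e.g.
+#
+#   principals = [b"user%d" % i for i in range(257)]
+#   SSHCertificateBuilder().valid_principals(principals)  # ValueError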
+_SSHKEY_CERT_MAX_PRINCIPALS = 256 + + +class SSHCertificateBuilder: + def __init__( + self, + _public_key: SSHCertPublicKeyTypes | None = None, + _serial: int | None = None, + _type: SSHCertificateType | None = None, + _key_id: bytes | None = None, + _valid_principals: list[bytes] = [], + _valid_for_all_principals: bool = False, + _valid_before: int | None = None, + _valid_after: int | None = None, + _critical_options: list[tuple[bytes, bytes]] = [], + _extensions: list[tuple[bytes, bytes]] = [], + ): + self._public_key = _public_key + self._serial = _serial + self._type = _type + self._key_id = _key_id + self._valid_principals = _valid_principals + self._valid_for_all_principals = _valid_for_all_principals + self._valid_before = _valid_before + self._valid_after = _valid_after + self._critical_options = _critical_options + self._extensions = _extensions + + def public_key( + self, public_key: SSHCertPublicKeyTypes + ) -> SSHCertificateBuilder: + if not isinstance( + public_key, + ( + ec.EllipticCurvePublicKey, + rsa.RSAPublicKey, + ed25519.Ed25519PublicKey, + ), + ): + raise TypeError("Unsupported key type") + if self._public_key is not None: + raise ValueError("public_key already set") + + return SSHCertificateBuilder( + _public_key=public_key, + _serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def serial(self, serial: int) -> SSHCertificateBuilder: + if not isinstance(serial, int): + raise TypeError("serial must be an integer") + if not 0 <= serial < 2**64: + raise ValueError("serial must be between 0 and 2**64") + if self._serial is not None: + raise ValueError("serial already set") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def type(self, type: SSHCertificateType) -> SSHCertificateBuilder: + if not isinstance(type, SSHCertificateType): + raise TypeError("type must be an SSHCertificateType") + if self._type is not None: + raise ValueError("type already set") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def key_id(self, key_id: bytes) -> SSHCertificateBuilder: + if not isinstance(key_id, bytes): + raise TypeError("key_id must be bytes") + if self._key_id is not None: + raise ValueError("key_id already set") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=self._type, + _key_id=key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def valid_principals( + self, valid_principals: list[bytes] 
+    ) -> SSHCertificateBuilder:
+        if self._valid_for_all_principals:
+            raise ValueError(
+                "Principals can't be set because the cert is valid "
+                "for all principals"
+            )
+        if (
+            not all(isinstance(x, bytes) for x in valid_principals)
+            or not valid_principals
+        ):
+            raise TypeError(
+                "principals must be a list of bytes and can't be empty"
+            )
+        if self._valid_principals:
+            raise ValueError("valid_principals already set")
+
+        if len(valid_principals) > _SSHKEY_CERT_MAX_PRINCIPALS:
+            raise ValueError(
+                "Reached or exceeded the maximum number of valid_principals"
+            )
+
+        return SSHCertificateBuilder(
+            _public_key=self._public_key,
+            _serial=self._serial,
+            _type=self._type,
+            _key_id=self._key_id,
+            _valid_principals=valid_principals,
+            _valid_for_all_principals=self._valid_for_all_principals,
+            _valid_before=self._valid_before,
+            _valid_after=self._valid_after,
+            _critical_options=self._critical_options,
+            _extensions=self._extensions,
+        )
+
+    def valid_for_all_principals(self):
+        if self._valid_principals:
+            raise ValueError(
+                "valid_principals already set, can't set "
+                "valid_for_all_principals"
+            )
+        if self._valid_for_all_principals:
+            raise ValueError("valid_for_all_principals already set")
+
+        return SSHCertificateBuilder(
+            _public_key=self._public_key,
+            _serial=self._serial,
+            _type=self._type,
+            _key_id=self._key_id,
+            _valid_principals=self._valid_principals,
+            _valid_for_all_principals=True,
+            _valid_before=self._valid_before,
+            _valid_after=self._valid_after,
+            _critical_options=self._critical_options,
+            _extensions=self._extensions,
+        )
+
+    def valid_before(self, valid_before: int | float) -> SSHCertificateBuilder:
+        if not isinstance(valid_before, (int, float)):
+            raise TypeError("valid_before must be an int or float")
+        valid_before = int(valid_before)
+        if valid_before < 0 or valid_before >= 2**64:
+            raise ValueError("valid_before must be in [0, 2**64)")
+        if self._valid_before is not None:
+            raise ValueError("valid_before already set")
+
+        return SSHCertificateBuilder(
+            _public_key=self._public_key,
+            _serial=self._serial,
+            _type=self._type,
+            _key_id=self._key_id,
+            _valid_principals=self._valid_principals,
+            _valid_for_all_principals=self._valid_for_all_principals,
+            _valid_before=valid_before,
+            _valid_after=self._valid_after,
+            _critical_options=self._critical_options,
+            _extensions=self._extensions,
+        )
+
+    def valid_after(self, valid_after: int | float) -> SSHCertificateBuilder:
+        if not isinstance(valid_after, (int, float)):
+            raise TypeError("valid_after must be an int or float")
+        valid_after = int(valid_after)
+        if valid_after < 0 or valid_after >= 2**64:
+            raise ValueError("valid_after must be in [0, 2**64)")
+        if self._valid_after is not None:
+            raise ValueError("valid_after already set")
+
+        return SSHCertificateBuilder(
+            _public_key=self._public_key,
+            _serial=self._serial,
+            _type=self._type,
+            _key_id=self._key_id,
+            _valid_principals=self._valid_principals,
+            _valid_for_all_principals=self._valid_for_all_principals,
+            _valid_before=self._valid_before,
+            _valid_after=valid_after,
+            _critical_options=self._critical_options,
+            _extensions=self._extensions,
+        )
+
+    def add_critical_option(
+        self, name: bytes, value: bytes
+    ) -> SSHCertificateBuilder:
+        if not isinstance(name, bytes) or not isinstance(value, bytes):
+            raise TypeError("name and value must be bytes")
+        # This is O(n**2)
+        if name in [name for name, _ in self._critical_options]:
+            raise ValueError("Duplicate critical option name")
+
+        return SSHCertificateBuilder(
+            _public_key=self._public_key,
+
_serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=[*self._critical_options, (name, value)], + _extensions=self._extensions, + ) + + def add_extension( + self, name: bytes, value: bytes + ) -> SSHCertificateBuilder: + if not isinstance(name, bytes) or not isinstance(value, bytes): + raise TypeError("name and value must be bytes") + # This is O(n**2) + if name in [name for name, _ in self._extensions]: + raise ValueError("Duplicate extension name") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=[*self._extensions, (name, value)], + ) + + def sign(self, private_key: SSHCertPrivateKeyTypes) -> SSHCertificate: + if not isinstance( + private_key, + ( + ec.EllipticCurvePrivateKey, + rsa.RSAPrivateKey, + ed25519.Ed25519PrivateKey, + ), + ): + raise TypeError("Unsupported private key type") + + if self._public_key is None: + raise ValueError("public_key must be set") + + # Not required + serial = 0 if self._serial is None else self._serial + + if self._type is None: + raise ValueError("type must be set") + + # Not required + key_id = b"" if self._key_id is None else self._key_id + + # A zero length list is valid, but means the certificate + # is valid for any principal of the specified type. We require + # the user to explicitly set valid_for_all_principals to get + # that behavior. 
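+        # A minimal chain that satisfies the checks below (illustrative;
+        # `user_public_key` and `ca_private_key` are placeholder names):
+        #
+        #   cert = (
+        #       SSHCertificateBuilder()
+        #       .public_key(user_public_key)
+        #       .type(SSHCertificateType.USER)
+        #       .valid_after(1_700_000_000)
+        #       .valid_before(1_700_003_600)
+        #       .valid_for_all_principals()
+        #       .sign(ca_private_key)
+        #   )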
+ if not self._valid_principals and not self._valid_for_all_principals: + raise ValueError( + "valid_principals must be set if valid_for_all_principals " + "is False" + ) + + if self._valid_before is None: + raise ValueError("valid_before must be set") + + if self._valid_after is None: + raise ValueError("valid_after must be set") + + if self._valid_after > self._valid_before: + raise ValueError("valid_after must be earlier than valid_before") + + # lexically sort our byte strings + self._critical_options.sort(key=lambda x: x[0]) + self._extensions.sort(key=lambda x: x[0]) + + key_type = _get_ssh_key_type(self._public_key) + cert_prefix = key_type + _CERT_SUFFIX + + # Marshal the bytes to be signed + nonce = os.urandom(32) + kformat = _lookup_kformat(key_type) + f = _FragList() + f.put_sshstr(cert_prefix) + f.put_sshstr(nonce) + kformat.encode_public(self._public_key, f) + f.put_u64(serial) + f.put_u32(self._type.value) + f.put_sshstr(key_id) + fprincipals = _FragList() + for p in self._valid_principals: + fprincipals.put_sshstr(p) + f.put_sshstr(fprincipals.tobytes()) + f.put_u64(self._valid_after) + f.put_u64(self._valid_before) + fcrit = _FragList() + for name, value in self._critical_options: + fcrit.put_sshstr(name) + if len(value) > 0: + foptval = _FragList() + foptval.put_sshstr(value) + fcrit.put_sshstr(foptval.tobytes()) + else: + fcrit.put_sshstr(value) + f.put_sshstr(fcrit.tobytes()) + fext = _FragList() + for name, value in self._extensions: + fext.put_sshstr(name) + if len(value) > 0: + fextval = _FragList() + fextval.put_sshstr(value) + fext.put_sshstr(fextval.tobytes()) + else: + fext.put_sshstr(value) + f.put_sshstr(fext.tobytes()) + f.put_sshstr(b"") # RESERVED FIELD + # encode CA public key + ca_type = _get_ssh_key_type(private_key) + caformat = _lookup_kformat(ca_type) + caf = _FragList() + caf.put_sshstr(ca_type) + caformat.encode_public(private_key.public_key(), caf) + f.put_sshstr(caf.tobytes()) + # Sigs according to the rules defined for the CA's public key + # (RFC4253 section 6.6 for ssh-rsa, RFC5656 for ECDSA, + # and RFC8032 for Ed25519). + if isinstance(private_key, ed25519.Ed25519PrivateKey): + signature = private_key.sign(f.tobytes()) + fsig = _FragList() + fsig.put_sshstr(ca_type) + fsig.put_sshstr(signature) + f.put_sshstr(fsig.tobytes()) + elif isinstance(private_key, ec.EllipticCurvePrivateKey): + hash_alg = _get_ec_hash_alg(private_key.curve) + signature = private_key.sign(f.tobytes(), ec.ECDSA(hash_alg)) + r, s = asym_utils.decode_dss_signature(signature) + fsig = _FragList() + fsig.put_sshstr(ca_type) + fsigblob = _FragList() + fsigblob.put_mpint(r) + fsigblob.put_mpint(s) + fsig.put_sshstr(fsigblob.tobytes()) + f.put_sshstr(fsig.tobytes()) + + else: + assert isinstance(private_key, rsa.RSAPrivateKey) + # Just like Golang, we're going to use SHA512 for RSA + # https://cs.opensource.google/go/x/crypto/+/refs/tags/ + # v0.4.0:ssh/certs.go;l=445 + # RFC 8332 defines SHA256 and 512 as options + fsig = _FragList() + fsig.put_sshstr(_SSH_RSA_SHA512) + signature = private_key.sign( + f.tobytes(), padding.PKCS1v15(), hashes.SHA512() + ) + fsig.put_sshstr(signature) + f.put_sshstr(fsig.tobytes()) + + cert_data = binascii.b2a_base64(f.tobytes()).strip() + # load_ssh_public_identity returns a union, but this is + # guaranteed to be an SSHCertificate, so we cast to make + # mypy happy. 
+ return typing.cast( + SSHCertificate, + load_ssh_public_identity(b"".join([cert_prefix, b" ", cert_data])), + ) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py new file mode 100644 index 00000000..c1af4230 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py @@ -0,0 +1,9 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + + +class InvalidToken(Exception): + pass diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..cbb4b3ab Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc new file mode 100644 index 00000000..44e92a5e Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc new file mode 100644 index 00000000..7ad950bf Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py new file mode 100644 index 00000000..21fb0004 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py @@ -0,0 +1,101 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
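+
+# A worked example against the RFC 4226 (Appendix D) test vectors,
+# illustrative only:
+#
+#   from cryptography.hazmat.primitives.hashes import SHA1
+#   otp = HOTP(b"12345678901234567890", 6, SHA1())
+#   otp.generate(0)           # b"755224"
+#   otp.verify(b"755224", 0)  # returns None; raises InvalidToken on mismatch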
+ +from __future__ import annotations + +import base64 +import typing +from urllib.parse import quote, urlencode + +from cryptography.hazmat.primitives import constant_time, hmac +from cryptography.hazmat.primitives.hashes import SHA1, SHA256, SHA512 +from cryptography.hazmat.primitives.twofactor import InvalidToken +from cryptography.utils import Buffer + +HOTPHashTypes = typing.Union[SHA1, SHA256, SHA512] + + +def _generate_uri( + hotp: HOTP, + type_name: str, + account_name: str, + issuer: str | None, + extra_parameters: list[tuple[str, int]], +) -> str: + parameters = [ + ("digits", hotp._length), + ("secret", base64.b32encode(hotp._key)), + ("algorithm", hotp._algorithm.name.upper()), + ] + + if issuer is not None: + parameters.append(("issuer", issuer)) + + parameters.extend(extra_parameters) + + label = ( + f"{quote(issuer)}:{quote(account_name)}" + if issuer + else quote(account_name) + ) + return f"otpauth://{type_name}/{label}?{urlencode(parameters)}" + + +class HOTP: + def __init__( + self, + key: Buffer, + length: int, + algorithm: HOTPHashTypes, + backend: typing.Any = None, + enforce_key_length: bool = True, + ) -> None: + if len(key) < 16 and enforce_key_length is True: + raise ValueError("Key length has to be at least 128 bits.") + + if not isinstance(length, int): + raise TypeError("Length parameter must be an integer type.") + + if length < 6 or length > 8: + raise ValueError("Length of HOTP has to be between 6 and 8.") + + if not isinstance(algorithm, (SHA1, SHA256, SHA512)): + raise TypeError("Algorithm must be SHA1, SHA256 or SHA512.") + + self._key = key + self._length = length + self._algorithm = algorithm + + def generate(self, counter: int) -> bytes: + if not isinstance(counter, int): + raise TypeError("Counter parameter must be an integer type.") + + truncated_value = self._dynamic_truncate(counter) + hotp = truncated_value % (10**self._length) + return "{0:0{1}}".format(hotp, self._length).encode() + + def verify(self, hotp: bytes, counter: int) -> None: + if not constant_time.bytes_eq(self.generate(counter), hotp): + raise InvalidToken("Supplied HOTP value does not match.") + + def _dynamic_truncate(self, counter: int) -> int: + ctx = hmac.HMAC(self._key, self._algorithm) + + try: + ctx.update(counter.to_bytes(length=8, byteorder="big")) + except OverflowError: + raise ValueError(f"Counter must be between 0 and {2**64 - 1}.") + + hmac_value = ctx.finalize() + + offset = hmac_value[len(hmac_value) - 1] & 0b1111 + p = hmac_value[offset : offset + 4] + return int.from_bytes(p, byteorder="big") & 0x7FFFFFFF + + def get_provisioning_uri( + self, account_name: str, counter: int, issuer: str | None + ) -> str: + return _generate_uri( + self, "hotp", account_name, issuer, [("counter", int(counter))] + ) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/totp.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/totp.py new file mode 100644 index 00000000..10c725cc --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/totp.py @@ -0,0 +1,56 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
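+
+# TOTP is HOTP with the counter derived from the clock: counter =
+# int(time / time_step). Worked with the RFC 6238 default 30 s step
+# (illustrative):
+#
+#   time = 59  ->  counter = int(59 / 30) = 1
+#   time = 60  ->  counter = 2
+#
+# so a given code remains valid for the rest of its 30-second window.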
+ +from __future__ import annotations + +import typing + +from cryptography.hazmat.primitives import constant_time +from cryptography.hazmat.primitives.twofactor import InvalidToken +from cryptography.hazmat.primitives.twofactor.hotp import ( + HOTP, + HOTPHashTypes, + _generate_uri, +) +from cryptography.utils import Buffer + + +class TOTP: + def __init__( + self, + key: Buffer, + length: int, + algorithm: HOTPHashTypes, + time_step: int, + backend: typing.Any = None, + enforce_key_length: bool = True, + ): + self._time_step = time_step + self._hotp = HOTP( + key, length, algorithm, enforce_key_length=enforce_key_length + ) + + def generate(self, time: int | float) -> bytes: + if not isinstance(time, (int, float)): + raise TypeError( + "Time parameter must be an integer type or float type." + ) + + counter = int(time / self._time_step) + return self._hotp.generate(counter) + + def verify(self, totp: bytes, time: int) -> None: + if not constant_time.bytes_eq(self.generate(time), totp): + raise InvalidToken("Supplied TOTP value does not match.") + + def get_provisioning_uri( + self, account_name: str, issuer: str | None + ) -> str: + return _generate_uri( + self._hotp, + "totp", + account_name, + issuer, + [("period", int(self._time_step))], + ) diff --git a/venv/lib/python3.12/site-packages/cryptography/py.typed b/venv/lib/python3.12/site-packages/cryptography/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/venv/lib/python3.12/site-packages/cryptography/utils.py b/venv/lib/python3.12/site-packages/cryptography/utils.py new file mode 100644 index 00000000..a0fc3b1d --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/utils.py @@ -0,0 +1,137 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import enum +import sys +import types +import typing +import warnings +from collections.abc import Callable, Sequence + + +# We use a UserWarning subclass, instead of DeprecationWarning, because CPython +# decided deprecation warnings should be invisible by default. +class CryptographyDeprecationWarning(UserWarning): + pass + + +# Several APIs were deprecated with no specific end-of-life date because of the +# ubiquity of their use. They should not be removed until we agree on when that +# cycle ends. +DeprecatedIn36 = CryptographyDeprecationWarning +DeprecatedIn40 = CryptographyDeprecationWarning +DeprecatedIn41 = CryptographyDeprecationWarning +DeprecatedIn42 = CryptographyDeprecationWarning +DeprecatedIn43 = CryptographyDeprecationWarning + + +# If you're wondering why we don't use `Buffer`, it's because `Buffer` would +# be more accurately named: Bufferable. It means something which has an +# `__buffer__`. Which means you can't actually treat the result as a buffer +# (and do things like take a `len()`). 
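+# Concretely (illustrative): bytes, bytearray and memoryview all satisfy the
+# alias below, and unlike a bare __buffer__ object they support len()
+# directly, e.g. len(memoryview(b"abc")) == 3.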
+if sys.version_info >= (3, 9): + Buffer = typing.Union[bytes, bytearray, memoryview] +else: + Buffer = typing.ByteString + + +def _check_bytes(name: str, value: bytes) -> None: + if not isinstance(value, bytes): + raise TypeError(f"{name} must be bytes") + + +def _check_byteslike(name: str, value: Buffer) -> None: + try: + memoryview(value) + except TypeError: + raise TypeError(f"{name} must be bytes-like") + + +def int_to_bytes(integer: int, length: int | None = None) -> bytes: + if length == 0: + raise ValueError("length argument can't be 0") + return integer.to_bytes( + length or (integer.bit_length() + 7) // 8 or 1, "big" + ) + + +class InterfaceNotImplemented(Exception): + pass + + +class _DeprecatedValue: + def __init__(self, value: object, message: str, warning_class): + self.value = value + self.message = message + self.warning_class = warning_class + + +class _ModuleWithDeprecations(types.ModuleType): + def __init__(self, module: types.ModuleType): + super().__init__(module.__name__) + self.__dict__["_module"] = module + + def __getattr__(self, attr: str) -> object: + obj = getattr(self._module, attr) + if isinstance(obj, _DeprecatedValue): + warnings.warn(obj.message, obj.warning_class, stacklevel=2) + obj = obj.value + return obj + + def __setattr__(self, attr: str, value: object) -> None: + setattr(self._module, attr, value) + + def __delattr__(self, attr: str) -> None: + obj = getattr(self._module, attr) + if isinstance(obj, _DeprecatedValue): + warnings.warn(obj.message, obj.warning_class, stacklevel=2) + + delattr(self._module, attr) + + def __dir__(self) -> Sequence[str]: + return ["_module", *dir(self._module)] + + +def deprecated( + value: object, + module_name: str, + message: str, + warning_class: type[Warning], + name: str | None = None, +) -> _DeprecatedValue: + module = sys.modules[module_name] + if not isinstance(module, _ModuleWithDeprecations): + sys.modules[module_name] = module = _ModuleWithDeprecations(module) + dv = _DeprecatedValue(value, message, warning_class) + # Maintain backwards compatibility with `name is None` for pyOpenSSL. + if name is not None: + setattr(module, name, dv) + return dv + + +def cached_property(func: Callable) -> property: + cached_name = f"_cached_{func}" + sentinel = object() + + def inner(instance: object): + cache = getattr(instance, cached_name, sentinel) + if cache is not sentinel: + return cache + result = func(instance) + setattr(instance, cached_name, result) + return result + + return property(inner) + + +# Python 3.10 changed representation of enums. We use well-defined object +# representation and string representation from Python 3.9. +class Enum(enum.Enum): + def __repr__(self) -> str: + return f"<{self.__class__.__name__}.{self._name_}: {self._value_!r}>" + + def __str__(self) -> str: + return f"{self.__class__.__name__}.{self._name_}" diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/__init__.py b/venv/lib/python3.12/site-packages/cryptography/x509/__init__.py new file mode 100644 index 00000000..318eecc9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/x509/__init__.py @@ -0,0 +1,270 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
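Two behaviors of the cryptography/utils.py hunk above are worth noting: int_to_bytes emits the minimal big-endian encoding unless a width is given, and the Enum subclass pins the pre-3.10 repr/str format even on Python 3.12. A small sketch; these are internal helpers, subclassed here purely for illustration:

    from cryptography import utils

    assert utils.int_to_bytes(65537) == b"\x01\x00\x01"            # minimal width
    assert utils.int_to_bytes(1, length=4) == b"\x00\x00\x00\x01"  # fixed width

    class Color(utils.Enum):
        RED = 1

    assert repr(Color.RED) == "<Color.RED: 1>"  # 3.9-style repr, pinned
    assert str(Color.RED) == "Color.RED"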
+ +from __future__ import annotations + +from cryptography.x509 import certificate_transparency, verification +from cryptography.x509.base import ( + Attribute, + AttributeNotFound, + Attributes, + Certificate, + CertificateBuilder, + CertificateRevocationList, + CertificateRevocationListBuilder, + CertificateSigningRequest, + CertificateSigningRequestBuilder, + InvalidVersion, + RevokedCertificate, + RevokedCertificateBuilder, + Version, + load_der_x509_certificate, + load_der_x509_crl, + load_der_x509_csr, + load_pem_x509_certificate, + load_pem_x509_certificates, + load_pem_x509_crl, + load_pem_x509_csr, + random_serial_number, +) +from cryptography.x509.extensions import ( + AccessDescription, + Admission, + Admissions, + AuthorityInformationAccess, + AuthorityKeyIdentifier, + BasicConstraints, + CertificateIssuer, + CertificatePolicies, + CRLDistributionPoints, + CRLNumber, + CRLReason, + DeltaCRLIndicator, + DistributionPoint, + DuplicateExtension, + ExtendedKeyUsage, + Extension, + ExtensionNotFound, + Extensions, + ExtensionType, + FreshestCRL, + GeneralNames, + InhibitAnyPolicy, + InvalidityDate, + IssuerAlternativeName, + IssuingDistributionPoint, + KeyUsage, + MSCertificateTemplate, + NameConstraints, + NamingAuthority, + NoticeReference, + OCSPAcceptableResponses, + OCSPNoCheck, + OCSPNonce, + PolicyConstraints, + PolicyInformation, + PrecertificateSignedCertificateTimestamps, + PrecertPoison, + PrivateKeyUsagePeriod, + ProfessionInfo, + ReasonFlags, + SignedCertificateTimestamps, + SubjectAlternativeName, + SubjectInformationAccess, + SubjectKeyIdentifier, + TLSFeature, + TLSFeatureType, + UnrecognizedExtension, + UserNotice, +) +from cryptography.x509.general_name import ( + DirectoryName, + DNSName, + GeneralName, + IPAddress, + OtherName, + RegisteredID, + RFC822Name, + UniformResourceIdentifier, + UnsupportedGeneralNameType, +) +from cryptography.x509.name import ( + Name, + NameAttribute, + RelativeDistinguishedName, +) +from cryptography.x509.oid import ( + AuthorityInformationAccessOID, + CertificatePoliciesOID, + CRLEntryExtensionOID, + ExtendedKeyUsageOID, + ExtensionOID, + NameOID, + ObjectIdentifier, + PublicKeyAlgorithmOID, + SignatureAlgorithmOID, +) + +OID_AUTHORITY_INFORMATION_ACCESS = ExtensionOID.AUTHORITY_INFORMATION_ACCESS +OID_AUTHORITY_KEY_IDENTIFIER = ExtensionOID.AUTHORITY_KEY_IDENTIFIER +OID_BASIC_CONSTRAINTS = ExtensionOID.BASIC_CONSTRAINTS +OID_CERTIFICATE_POLICIES = ExtensionOID.CERTIFICATE_POLICIES +OID_CRL_DISTRIBUTION_POINTS = ExtensionOID.CRL_DISTRIBUTION_POINTS +OID_EXTENDED_KEY_USAGE = ExtensionOID.EXTENDED_KEY_USAGE +OID_FRESHEST_CRL = ExtensionOID.FRESHEST_CRL +OID_INHIBIT_ANY_POLICY = ExtensionOID.INHIBIT_ANY_POLICY +OID_ISSUER_ALTERNATIVE_NAME = ExtensionOID.ISSUER_ALTERNATIVE_NAME +OID_KEY_USAGE = ExtensionOID.KEY_USAGE +OID_PRIVATE_KEY_USAGE_PERIOD = ExtensionOID.PRIVATE_KEY_USAGE_PERIOD +OID_NAME_CONSTRAINTS = ExtensionOID.NAME_CONSTRAINTS +OID_OCSP_NO_CHECK = ExtensionOID.OCSP_NO_CHECK +OID_POLICY_CONSTRAINTS = ExtensionOID.POLICY_CONSTRAINTS +OID_POLICY_MAPPINGS = ExtensionOID.POLICY_MAPPINGS +OID_SUBJECT_ALTERNATIVE_NAME = ExtensionOID.SUBJECT_ALTERNATIVE_NAME +OID_SUBJECT_DIRECTORY_ATTRIBUTES = ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES +OID_SUBJECT_INFORMATION_ACCESS = ExtensionOID.SUBJECT_INFORMATION_ACCESS +OID_SUBJECT_KEY_IDENTIFIER = ExtensionOID.SUBJECT_KEY_IDENTIFIER + +OID_DSA_WITH_SHA1 = SignatureAlgorithmOID.DSA_WITH_SHA1 +OID_DSA_WITH_SHA224 = SignatureAlgorithmOID.DSA_WITH_SHA224 +OID_DSA_WITH_SHA256 = 
SignatureAlgorithmOID.DSA_WITH_SHA256 +OID_ECDSA_WITH_SHA1 = SignatureAlgorithmOID.ECDSA_WITH_SHA1 +OID_ECDSA_WITH_SHA224 = SignatureAlgorithmOID.ECDSA_WITH_SHA224 +OID_ECDSA_WITH_SHA256 = SignatureAlgorithmOID.ECDSA_WITH_SHA256 +OID_ECDSA_WITH_SHA384 = SignatureAlgorithmOID.ECDSA_WITH_SHA384 +OID_ECDSA_WITH_SHA512 = SignatureAlgorithmOID.ECDSA_WITH_SHA512 +OID_RSA_WITH_MD5 = SignatureAlgorithmOID.RSA_WITH_MD5 +OID_RSA_WITH_SHA1 = SignatureAlgorithmOID.RSA_WITH_SHA1 +OID_RSA_WITH_SHA224 = SignatureAlgorithmOID.RSA_WITH_SHA224 +OID_RSA_WITH_SHA256 = SignatureAlgorithmOID.RSA_WITH_SHA256 +OID_RSA_WITH_SHA384 = SignatureAlgorithmOID.RSA_WITH_SHA384 +OID_RSA_WITH_SHA512 = SignatureAlgorithmOID.RSA_WITH_SHA512 +OID_RSASSA_PSS = SignatureAlgorithmOID.RSASSA_PSS + +OID_COMMON_NAME = NameOID.COMMON_NAME +OID_COUNTRY_NAME = NameOID.COUNTRY_NAME +OID_DOMAIN_COMPONENT = NameOID.DOMAIN_COMPONENT +OID_DN_QUALIFIER = NameOID.DN_QUALIFIER +OID_EMAIL_ADDRESS = NameOID.EMAIL_ADDRESS +OID_GENERATION_QUALIFIER = NameOID.GENERATION_QUALIFIER +OID_GIVEN_NAME = NameOID.GIVEN_NAME +OID_LOCALITY_NAME = NameOID.LOCALITY_NAME +OID_ORGANIZATIONAL_UNIT_NAME = NameOID.ORGANIZATIONAL_UNIT_NAME +OID_ORGANIZATION_NAME = NameOID.ORGANIZATION_NAME +OID_PSEUDONYM = NameOID.PSEUDONYM +OID_SERIAL_NUMBER = NameOID.SERIAL_NUMBER +OID_STATE_OR_PROVINCE_NAME = NameOID.STATE_OR_PROVINCE_NAME +OID_SURNAME = NameOID.SURNAME +OID_TITLE = NameOID.TITLE + +OID_CLIENT_AUTH = ExtendedKeyUsageOID.CLIENT_AUTH +OID_CODE_SIGNING = ExtendedKeyUsageOID.CODE_SIGNING +OID_EMAIL_PROTECTION = ExtendedKeyUsageOID.EMAIL_PROTECTION +OID_OCSP_SIGNING = ExtendedKeyUsageOID.OCSP_SIGNING +OID_SERVER_AUTH = ExtendedKeyUsageOID.SERVER_AUTH +OID_TIME_STAMPING = ExtendedKeyUsageOID.TIME_STAMPING + +OID_ANY_POLICY = CertificatePoliciesOID.ANY_POLICY +OID_CPS_QUALIFIER = CertificatePoliciesOID.CPS_QUALIFIER +OID_CPS_USER_NOTICE = CertificatePoliciesOID.CPS_USER_NOTICE + +OID_CERTIFICATE_ISSUER = CRLEntryExtensionOID.CERTIFICATE_ISSUER +OID_CRL_REASON = CRLEntryExtensionOID.CRL_REASON +OID_INVALIDITY_DATE = CRLEntryExtensionOID.INVALIDITY_DATE + +OID_CA_ISSUERS = AuthorityInformationAccessOID.CA_ISSUERS +OID_OCSP = AuthorityInformationAccessOID.OCSP + +__all__ = [ + "OID_CA_ISSUERS", + "OID_OCSP", + "AccessDescription", + "Admission", + "Admissions", + "Attribute", + "AttributeNotFound", + "Attributes", + "AuthorityInformationAccess", + "AuthorityKeyIdentifier", + "BasicConstraints", + "CRLDistributionPoints", + "CRLNumber", + "CRLReason", + "Certificate", + "CertificateBuilder", + "CertificateIssuer", + "CertificatePolicies", + "CertificateRevocationList", + "CertificateRevocationListBuilder", + "CertificateSigningRequest", + "CertificateSigningRequestBuilder", + "DNSName", + "DeltaCRLIndicator", + "DirectoryName", + "DistributionPoint", + "DuplicateExtension", + "ExtendedKeyUsage", + "Extension", + "ExtensionNotFound", + "ExtensionType", + "Extensions", + "FreshestCRL", + "GeneralName", + "GeneralNames", + "IPAddress", + "InhibitAnyPolicy", + "InvalidVersion", + "InvalidityDate", + "IssuerAlternativeName", + "IssuingDistributionPoint", + "KeyUsage", + "MSCertificateTemplate", + "Name", + "NameAttribute", + "NameConstraints", + "NameOID", + "NamingAuthority", + "NoticeReference", + "OCSPAcceptableResponses", + "OCSPNoCheck", + "OCSPNonce", + "ObjectIdentifier", + "OtherName", + "PolicyConstraints", + "PolicyInformation", + "PrecertPoison", + "PrecertificateSignedCertificateTimestamps", + "PrivateKeyUsagePeriod", + "ProfessionInfo", + "PublicKeyAlgorithmOID", + 
"RFC822Name", + "ReasonFlags", + "RegisteredID", + "RelativeDistinguishedName", + "RevokedCertificate", + "RevokedCertificateBuilder", + "SignatureAlgorithmOID", + "SignedCertificateTimestamps", + "SubjectAlternativeName", + "SubjectInformationAccess", + "SubjectKeyIdentifier", + "TLSFeature", + "TLSFeatureType", + "UniformResourceIdentifier", + "UnrecognizedExtension", + "UnsupportedGeneralNameType", + "UserNotice", + "Version", + "certificate_transparency", + "load_der_x509_certificate", + "load_der_x509_crl", + "load_der_x509_csr", + "load_pem_x509_certificate", + "load_pem_x509_certificates", + "load_pem_x509_crl", + "load_pem_x509_csr", + "random_serial_number", + "verification", + "verification", +] diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..34ad7689 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/base.cpython-312.pyc new file mode 100644 index 00000000..f02396a5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/base.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc new file mode 100644 index 00000000..4f2e50df Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/extensions.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/extensions.cpython-312.pyc new file mode 100644 index 00000000..23e7dc63 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/extensions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/general_name.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/general_name.cpython-312.pyc new file mode 100644 index 00000000..c54eb804 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/general_name.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/name.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/name.cpython-312.pyc new file mode 100644 index 00000000..3adb7325 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/name.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/ocsp.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/ocsp.cpython-312.pyc new file mode 100644 index 00000000..10847e67 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/ocsp.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/oid.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/oid.cpython-312.pyc new file mode 100644 index 00000000..b470f80f Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/oid.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/verification.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/verification.cpython-312.pyc new file mode 100644 index 00000000..f6839d26 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/verification.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/base.py b/venv/lib/python3.12/site-packages/cryptography/x509/base.py new file mode 100644 index 00000000..1be612be --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/x509/base.py @@ -0,0 +1,848 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc +import datetime +import os +import typing +import warnings +from collections.abc import Iterable + +from cryptography import utils +from cryptography.hazmat.bindings._rust import x509 as rust_x509 +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ( + dsa, + ec, + ed448, + ed25519, + padding, + rsa, + x448, + x25519, +) +from cryptography.hazmat.primitives.asymmetric.types import ( + CertificateIssuerPrivateKeyTypes, + CertificatePublicKeyTypes, +) +from cryptography.x509.extensions import ( + Extension, + Extensions, + ExtensionType, + _make_sequence_methods, +) +from cryptography.x509.name import Name, _ASN1Type +from cryptography.x509.oid import ObjectIdentifier + +_EARLIEST_UTC_TIME = datetime.datetime(1950, 1, 1) + +# This must be kept in sync with sign.rs's list of allowable types in +# identify_hash_type +_AllowedHashTypes = typing.Union[ + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, + hashes.SHA3_224, + hashes.SHA3_256, + hashes.SHA3_384, + hashes.SHA3_512, +] + + +class AttributeNotFound(Exception): + def __init__(self, msg: str, oid: ObjectIdentifier) -> None: + super().__init__(msg) + self.oid = oid + + +def _reject_duplicate_extension( + extension: Extension[ExtensionType], + extensions: list[Extension[ExtensionType]], +) -> None: + # This is quadratic in the number of extensions + for e in extensions: + if e.oid == extension.oid: + raise ValueError("This extension has already been set.") + + +def _reject_duplicate_attribute( + oid: ObjectIdentifier, + attributes: list[tuple[ObjectIdentifier, bytes, int | None]], +) -> None: + # This is quadratic in the number of attributes + for attr_oid, _, _ in attributes: + if attr_oid == oid: + raise ValueError("This attribute has already been set.") + + +def _convert_to_naive_utc_time(time: datetime.datetime) -> datetime.datetime: + """Normalizes a datetime to a naive datetime in UTC. + + time -- datetime to normalize. Assumed to be in UTC if not timezone + aware. 
+ """ + if time.tzinfo is not None: + offset = time.utcoffset() + offset = offset if offset else datetime.timedelta() + return time.replace(tzinfo=None) - offset + else: + return time + + +class Attribute: + def __init__( + self, + oid: ObjectIdentifier, + value: bytes, + _type: int = _ASN1Type.UTF8String.value, + ) -> None: + self._oid = oid + self._value = value + self._type = _type + + @property + def oid(self) -> ObjectIdentifier: + return self._oid + + @property + def value(self) -> bytes: + return self._value + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Attribute): + return NotImplemented + + return ( + self.oid == other.oid + and self.value == other.value + and self._type == other._type + ) + + def __hash__(self) -> int: + return hash((self.oid, self.value, self._type)) + + +class Attributes: + def __init__( + self, + attributes: Iterable[Attribute], + ) -> None: + self._attributes = list(attributes) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_attributes") + + def __repr__(self) -> str: + return f"" + + def get_attribute_for_oid(self, oid: ObjectIdentifier) -> Attribute: + for attr in self: + if attr.oid == oid: + return attr + + raise AttributeNotFound(f"No {oid} attribute was found", oid) + + +class Version(utils.Enum): + v1 = 0 + v3 = 2 + + +class InvalidVersion(Exception): + def __init__(self, msg: str, parsed_version: int) -> None: + super().__init__(msg) + self.parsed_version = parsed_version + + +Certificate = rust_x509.Certificate + + +class RevokedCertificate(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def serial_number(self) -> int: + """ + Returns the serial number of the revoked certificate. + """ + + @property + @abc.abstractmethod + def revocation_date(self) -> datetime.datetime: + """ + Returns the date of when this certificate was revoked. + """ + + @property + @abc.abstractmethod + def revocation_date_utc(self) -> datetime.datetime: + """ + Returns the date of when this certificate was revoked as a non-naive + UTC datetime. + """ + + @property + @abc.abstractmethod + def extensions(self) -> Extensions: + """ + Returns an Extensions object containing a list of Revoked extensions. + """ + + +# Runtime isinstance checks need this since the rust class is not a subclass. +RevokedCertificate.register(rust_x509.RevokedCertificate) + + +class _RawRevokedCertificate(RevokedCertificate): + def __init__( + self, + serial_number: int, + revocation_date: datetime.datetime, + extensions: Extensions, + ): + self._serial_number = serial_number + self._revocation_date = revocation_date + self._extensions = extensions + + @property + def serial_number(self) -> int: + return self._serial_number + + @property + def revocation_date(self) -> datetime.datetime: + warnings.warn( + "Properties that return a naïve datetime object have been " + "deprecated. 
Please switch to revocation_date_utc.", + utils.DeprecatedIn42, + stacklevel=2, + ) + return self._revocation_date + + @property + def revocation_date_utc(self) -> datetime.datetime: + return self._revocation_date.replace(tzinfo=datetime.timezone.utc) + + @property + def extensions(self) -> Extensions: + return self._extensions + + +CertificateRevocationList = rust_x509.CertificateRevocationList +CertificateSigningRequest = rust_x509.CertificateSigningRequest + + +load_pem_x509_certificate = rust_x509.load_pem_x509_certificate +load_der_x509_certificate = rust_x509.load_der_x509_certificate + +load_pem_x509_certificates = rust_x509.load_pem_x509_certificates + +load_pem_x509_csr = rust_x509.load_pem_x509_csr +load_der_x509_csr = rust_x509.load_der_x509_csr + +load_pem_x509_crl = rust_x509.load_pem_x509_crl +load_der_x509_crl = rust_x509.load_der_x509_crl + + +class CertificateSigningRequestBuilder: + def __init__( + self, + subject_name: Name | None = None, + extensions: list[Extension[ExtensionType]] = [], + attributes: list[tuple[ObjectIdentifier, bytes, int | None]] = [], + ): + """ + Creates an empty X.509 certificate request (v1). + """ + self._subject_name = subject_name + self._extensions = extensions + self._attributes = attributes + + def subject_name(self, name: Name) -> CertificateSigningRequestBuilder: + """ + Sets the certificate requestor's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError("Expecting x509.Name object.") + if self._subject_name is not None: + raise ValueError("The subject name may only be set once.") + return CertificateSigningRequestBuilder( + name, self._extensions, self._attributes + ) + + def add_extension( + self, extval: ExtensionType, critical: bool + ) -> CertificateSigningRequestBuilder: + """ + Adds an X.509 extension to the certificate request. + """ + if not isinstance(extval, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + + return CertificateSigningRequestBuilder( + self._subject_name, + [*self._extensions, extension], + self._attributes, + ) + + def add_attribute( + self, + oid: ObjectIdentifier, + value: bytes, + *, + _tag: _ASN1Type | None = None, + ) -> CertificateSigningRequestBuilder: + """ + Adds an X.509 attribute with an OID and associated value. + """ + if not isinstance(oid, ObjectIdentifier): + raise TypeError("oid must be an ObjectIdentifier") + + if not isinstance(value, bytes): + raise TypeError("value must be bytes") + + if _tag is not None and not isinstance(_tag, _ASN1Type): + raise TypeError("tag must be _ASN1Type") + + _reject_duplicate_attribute(oid, self._attributes) + + if _tag is not None: + tag = _tag.value + else: + tag = None + + return CertificateSigningRequestBuilder( + self._subject_name, + self._extensions, + [*self._attributes, (oid, value, tag)], + ) + + def sign( + self, + private_key: CertificateIssuerPrivateKeyTypes, + algorithm: _AllowedHashTypes | None, + backend: typing.Any = None, + *, + rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, + ecdsa_deterministic: bool | None = None, + ) -> CertificateSigningRequest: + """ + Signs the request using the requestor's private key. 
+ """ + if self._subject_name is None: + raise ValueError("A CertificateSigningRequest must have a subject") + + if rsa_padding is not None: + if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): + raise TypeError("Padding must be PSS or PKCS1v15") + if not isinstance(private_key, rsa.RSAPrivateKey): + raise TypeError("Padding is only supported for RSA keys") + + if ecdsa_deterministic is not None: + if not isinstance(private_key, ec.EllipticCurvePrivateKey): + raise TypeError( + "Deterministic ECDSA is only supported for EC keys" + ) + + return rust_x509.create_x509_csr( + self, + private_key, + algorithm, + rsa_padding, + ecdsa_deterministic, + ) + + +class CertificateBuilder: + _extensions: list[Extension[ExtensionType]] + + def __init__( + self, + issuer_name: Name | None = None, + subject_name: Name | None = None, + public_key: CertificatePublicKeyTypes | None = None, + serial_number: int | None = None, + not_valid_before: datetime.datetime | None = None, + not_valid_after: datetime.datetime | None = None, + extensions: list[Extension[ExtensionType]] = [], + ) -> None: + self._version = Version.v3 + self._issuer_name = issuer_name + self._subject_name = subject_name + self._public_key = public_key + self._serial_number = serial_number + self._not_valid_before = not_valid_before + self._not_valid_after = not_valid_after + self._extensions = extensions + + def issuer_name(self, name: Name) -> CertificateBuilder: + """ + Sets the CA's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError("Expecting x509.Name object.") + if self._issuer_name is not None: + raise ValueError("The issuer name may only be set once.") + return CertificateBuilder( + name, + self._subject_name, + self._public_key, + self._serial_number, + self._not_valid_before, + self._not_valid_after, + self._extensions, + ) + + def subject_name(self, name: Name) -> CertificateBuilder: + """ + Sets the requestor's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError("Expecting x509.Name object.") + if self._subject_name is not None: + raise ValueError("The subject name may only be set once.") + return CertificateBuilder( + self._issuer_name, + name, + self._public_key, + self._serial_number, + self._not_valid_before, + self._not_valid_after, + self._extensions, + ) + + def public_key( + self, + key: CertificatePublicKeyTypes, + ) -> CertificateBuilder: + """ + Sets the requestor's public key (as found in the signing request). + """ + if not isinstance( + key, + ( + dsa.DSAPublicKey, + rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, + x25519.X25519PublicKey, + x448.X448PublicKey, + ), + ): + raise TypeError( + "Expecting one of DSAPublicKey, RSAPublicKey," + " EllipticCurvePublicKey, Ed25519PublicKey," + " Ed448PublicKey, X25519PublicKey, or " + "X448PublicKey." + ) + if self._public_key is not None: + raise ValueError("The public key may only be set once.") + return CertificateBuilder( + self._issuer_name, + self._subject_name, + key, + self._serial_number, + self._not_valid_before, + self._not_valid_after, + self._extensions, + ) + + def serial_number(self, number: int) -> CertificateBuilder: + """ + Sets the certificate serial number. 
+ """ + if not isinstance(number, int): + raise TypeError("Serial number must be of integral type.") + if self._serial_number is not None: + raise ValueError("The serial number may only be set once.") + if number <= 0: + raise ValueError("The serial number should be positive.") + + # ASN.1 integers are always signed, so most significant bit must be + # zero. + if number.bit_length() >= 160: # As defined in RFC 5280 + raise ValueError( + "The serial number should not be more than 159 bits." + ) + return CertificateBuilder( + self._issuer_name, + self._subject_name, + self._public_key, + number, + self._not_valid_before, + self._not_valid_after, + self._extensions, + ) + + def not_valid_before(self, time: datetime.datetime) -> CertificateBuilder: + """ + Sets the certificate activation time. + """ + if not isinstance(time, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._not_valid_before is not None: + raise ValueError("The not valid before may only be set once.") + time = _convert_to_naive_utc_time(time) + if time < _EARLIEST_UTC_TIME: + raise ValueError( + "The not valid before date must be on or after" + " 1950 January 1)." + ) + if self._not_valid_after is not None and time > self._not_valid_after: + raise ValueError( + "The not valid before date must be before the not valid after " + "date." + ) + return CertificateBuilder( + self._issuer_name, + self._subject_name, + self._public_key, + self._serial_number, + time, + self._not_valid_after, + self._extensions, + ) + + def not_valid_after(self, time: datetime.datetime) -> CertificateBuilder: + """ + Sets the certificate expiration time. + """ + if not isinstance(time, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._not_valid_after is not None: + raise ValueError("The not valid after may only be set once.") + time = _convert_to_naive_utc_time(time) + if time < _EARLIEST_UTC_TIME: + raise ValueError( + "The not valid after date must be on or after 1950 January 1." + ) + if ( + self._not_valid_before is not None + and time < self._not_valid_before + ): + raise ValueError( + "The not valid after date must be after the not valid before " + "date." + ) + return CertificateBuilder( + self._issuer_name, + self._subject_name, + self._public_key, + self._serial_number, + self._not_valid_before, + time, + self._extensions, + ) + + def add_extension( + self, extval: ExtensionType, critical: bool + ) -> CertificateBuilder: + """ + Adds an X.509 extension to the certificate. + """ + if not isinstance(extval, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + + return CertificateBuilder( + self._issuer_name, + self._subject_name, + self._public_key, + self._serial_number, + self._not_valid_before, + self._not_valid_after, + [*self._extensions, extension], + ) + + def sign( + self, + private_key: CertificateIssuerPrivateKeyTypes, + algorithm: _AllowedHashTypes | None, + backend: typing.Any = None, + *, + rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, + ecdsa_deterministic: bool | None = None, + ) -> Certificate: + """ + Signs the certificate using the CA's private key. 
+ """ + if self._subject_name is None: + raise ValueError("A certificate must have a subject name") + + if self._issuer_name is None: + raise ValueError("A certificate must have an issuer name") + + if self._serial_number is None: + raise ValueError("A certificate must have a serial number") + + if self._not_valid_before is None: + raise ValueError("A certificate must have a not valid before time") + + if self._not_valid_after is None: + raise ValueError("A certificate must have a not valid after time") + + if self._public_key is None: + raise ValueError("A certificate must have a public key") + + if rsa_padding is not None: + if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): + raise TypeError("Padding must be PSS or PKCS1v15") + if not isinstance(private_key, rsa.RSAPrivateKey): + raise TypeError("Padding is only supported for RSA keys") + + if ecdsa_deterministic is not None: + if not isinstance(private_key, ec.EllipticCurvePrivateKey): + raise TypeError( + "Deterministic ECDSA is only supported for EC keys" + ) + + return rust_x509.create_x509_certificate( + self, + private_key, + algorithm, + rsa_padding, + ecdsa_deterministic, + ) + + +class CertificateRevocationListBuilder: + _extensions: list[Extension[ExtensionType]] + _revoked_certificates: list[RevokedCertificate] + + def __init__( + self, + issuer_name: Name | None = None, + last_update: datetime.datetime | None = None, + next_update: datetime.datetime | None = None, + extensions: list[Extension[ExtensionType]] = [], + revoked_certificates: list[RevokedCertificate] = [], + ): + self._issuer_name = issuer_name + self._last_update = last_update + self._next_update = next_update + self._extensions = extensions + self._revoked_certificates = revoked_certificates + + def issuer_name( + self, issuer_name: Name + ) -> CertificateRevocationListBuilder: + if not isinstance(issuer_name, Name): + raise TypeError("Expecting x509.Name object.") + if self._issuer_name is not None: + raise ValueError("The issuer name may only be set once.") + return CertificateRevocationListBuilder( + issuer_name, + self._last_update, + self._next_update, + self._extensions, + self._revoked_certificates, + ) + + def last_update( + self, last_update: datetime.datetime + ) -> CertificateRevocationListBuilder: + if not isinstance(last_update, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._last_update is not None: + raise ValueError("Last update may only be set once.") + last_update = _convert_to_naive_utc_time(last_update) + if last_update < _EARLIEST_UTC_TIME: + raise ValueError( + "The last update date must be on or after 1950 January 1." + ) + if self._next_update is not None and last_update > self._next_update: + raise ValueError( + "The last update date must be before the next update date." + ) + return CertificateRevocationListBuilder( + self._issuer_name, + last_update, + self._next_update, + self._extensions, + self._revoked_certificates, + ) + + def next_update( + self, next_update: datetime.datetime + ) -> CertificateRevocationListBuilder: + if not isinstance(next_update, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._next_update is not None: + raise ValueError("Last update may only be set once.") + next_update = _convert_to_naive_utc_time(next_update) + if next_update < _EARLIEST_UTC_TIME: + raise ValueError( + "The last update date must be on or after 1950 January 1." 
+ ) + if self._last_update is not None and next_update < self._last_update: + raise ValueError( + "The next update date must be after the last update date." + ) + return CertificateRevocationListBuilder( + self._issuer_name, + self._last_update, + next_update, + self._extensions, + self._revoked_certificates, + ) + + def add_extension( + self, extval: ExtensionType, critical: bool + ) -> CertificateRevocationListBuilder: + """ + Adds an X.509 extension to the certificate revocation list. + """ + if not isinstance(extval, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + return CertificateRevocationListBuilder( + self._issuer_name, + self._last_update, + self._next_update, + [*self._extensions, extension], + self._revoked_certificates, + ) + + def add_revoked_certificate( + self, revoked_certificate: RevokedCertificate + ) -> CertificateRevocationListBuilder: + """ + Adds a revoked certificate to the CRL. + """ + if not isinstance(revoked_certificate, RevokedCertificate): + raise TypeError("Must be an instance of RevokedCertificate") + + return CertificateRevocationListBuilder( + self._issuer_name, + self._last_update, + self._next_update, + self._extensions, + [*self._revoked_certificates, revoked_certificate], + ) + + def sign( + self, + private_key: CertificateIssuerPrivateKeyTypes, + algorithm: _AllowedHashTypes | None, + backend: typing.Any = None, + *, + rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, + ecdsa_deterministic: bool | None = None, + ) -> CertificateRevocationList: + if self._issuer_name is None: + raise ValueError("A CRL must have an issuer name") + + if self._last_update is None: + raise ValueError("A CRL must have a last update time") + + if self._next_update is None: + raise ValueError("A CRL must have a next update time") + + if rsa_padding is not None: + if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): + raise TypeError("Padding must be PSS or PKCS1v15") + if not isinstance(private_key, rsa.RSAPrivateKey): + raise TypeError("Padding is only supported for RSA keys") + + if ecdsa_deterministic is not None: + if not isinstance(private_key, ec.EllipticCurvePrivateKey): + raise TypeError( + "Deterministic ECDSA is only supported for EC keys" + ) + + return rust_x509.create_x509_crl( + self, + private_key, + algorithm, + rsa_padding, + ecdsa_deterministic, + ) + + +class RevokedCertificateBuilder: + def __init__( + self, + serial_number: int | None = None, + revocation_date: datetime.datetime | None = None, + extensions: list[Extension[ExtensionType]] = [], + ): + self._serial_number = serial_number + self._revocation_date = revocation_date + self._extensions = extensions + + def serial_number(self, number: int) -> RevokedCertificateBuilder: + if not isinstance(number, int): + raise TypeError("Serial number must be of integral type.") + if self._serial_number is not None: + raise ValueError("The serial number may only be set once.") + if number <= 0: + raise ValueError("The serial number should be positive") + + # ASN.1 integers are always signed, so most significant bit must be + # zero. + if number.bit_length() >= 160: # As defined in RFC 5280 + raise ValueError( + "The serial number should not be more than 159 bits." 
+ ) + return RevokedCertificateBuilder( + number, self._revocation_date, self._extensions + ) + + def revocation_date( + self, time: datetime.datetime + ) -> RevokedCertificateBuilder: + if not isinstance(time, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._revocation_date is not None: + raise ValueError("The revocation date may only be set once.") + time = _convert_to_naive_utc_time(time) + if time < _EARLIEST_UTC_TIME: + raise ValueError( + "The revocation date must be on or after 1950 January 1." + ) + return RevokedCertificateBuilder( + self._serial_number, time, self._extensions + ) + + def add_extension( + self, extval: ExtensionType, critical: bool + ) -> RevokedCertificateBuilder: + if not isinstance(extval, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + return RevokedCertificateBuilder( + self._serial_number, + self._revocation_date, + [*self._extensions, extension], + ) + + def build(self, backend: typing.Any = None) -> RevokedCertificate: + if self._serial_number is None: + raise ValueError("A revoked certificate must have a serial number") + if self._revocation_date is None: + raise ValueError( + "A revoked certificate must have a revocation date" + ) + return _RawRevokedCertificate( + self._serial_number, + self._revocation_date, + Extensions(self._extensions), + ) + + +def random_serial_number() -> int: + return int.from_bytes(os.urandom(20), "big") >> 1 diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/certificate_transparency.py b/venv/lib/python3.12/site-packages/cryptography/x509/certificate_transparency.py new file mode 100644 index 00000000..fb66cc60 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/x509/certificate_transparency.py @@ -0,0 +1,35 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography import utils +from cryptography.hazmat.bindings._rust import x509 as rust_x509 + + +class LogEntryType(utils.Enum): + X509_CERTIFICATE = 0 + PRE_CERTIFICATE = 1 + + +class Version(utils.Enum): + v1 = 0 + + +class SignatureAlgorithm(utils.Enum): + """ + Signature algorithms that are valid for SCTs. + + These are exactly the same as SignatureAlgorithm in RFC 5246 (TLS 1.2). + + See: <https://datatracker.ietf.org/doc/html/rfc5246#section-7.4.1.4.1> + """ + + ANONYMOUS = 0 + RSA = 1 + DSA = 2 + ECDSA = 3 + + +SignedCertificateTimestamp = rust_x509.Sct diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/extensions.py b/venv/lib/python3.12/site-packages/cryptography/x509/extensions.py new file mode 100644 index 00000000..dfa472d3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/x509/extensions.py @@ -0,0 +1,2528 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
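The base.py hunk above vendors the fluent builder API; a sketch of issuing a short-lived self-signed certificate using only what the diff adds, where the common name and validity window are placeholders:

    import datetime

    from cryptography import x509
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import ec
    from cryptography.x509.oid import NameOID

    key = ec.generate_private_key(ec.SECP256R1())
    name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "example.test")])
    now = datetime.datetime.now(datetime.timezone.utc)

    cert = (
        x509.CertificateBuilder()
        .subject_name(name)
        .issuer_name(name)  # self-signed: issuer == subject
        .public_key(key.public_key())
        .serial_number(x509.random_serial_number())  # 159-bit random, per base.py
        .not_valid_before(now)
        .not_valid_after(now + datetime.timedelta(days=30))
        .add_extension(
            x509.BasicConstraints(ca=False, path_length=None), critical=True
        )
        .sign(key, hashes.SHA256())
    )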
+ +from __future__ import annotations + +import abc +import datetime +import hashlib +import ipaddress +import typing +from collections.abc import Iterable, Iterator + +from cryptography import utils +from cryptography.hazmat.bindings._rust import asn1 +from cryptography.hazmat.bindings._rust import x509 as rust_x509 +from cryptography.hazmat.primitives import constant_time, serialization +from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePublicKey +from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey +from cryptography.hazmat.primitives.asymmetric.types import ( + CertificateIssuerPublicKeyTypes, + CertificatePublicKeyTypes, +) +from cryptography.x509.certificate_transparency import ( + SignedCertificateTimestamp, +) +from cryptography.x509.general_name import ( + DirectoryName, + DNSName, + GeneralName, + IPAddress, + OtherName, + RegisteredID, + RFC822Name, + UniformResourceIdentifier, + _IPAddressTypes, +) +from cryptography.x509.name import Name, RelativeDistinguishedName +from cryptography.x509.oid import ( + CRLEntryExtensionOID, + ExtensionOID, + ObjectIdentifier, + OCSPExtensionOID, +) + +ExtensionTypeVar = typing.TypeVar( + "ExtensionTypeVar", bound="ExtensionType", covariant=True +) + + +def _key_identifier_from_public_key( + public_key: CertificatePublicKeyTypes, +) -> bytes: + if isinstance(public_key, RSAPublicKey): + data = public_key.public_bytes( + serialization.Encoding.DER, + serialization.PublicFormat.PKCS1, + ) + elif isinstance(public_key, EllipticCurvePublicKey): + data = public_key.public_bytes( + serialization.Encoding.X962, + serialization.PublicFormat.UncompressedPoint, + ) + else: + # This is a very slow way to do this. + serialized = public_key.public_bytes( + serialization.Encoding.DER, + serialization.PublicFormat.SubjectPublicKeyInfo, + ) + data = asn1.parse_spki_for_data(serialized) + + return hashlib.sha1(data).digest() + + +def _make_sequence_methods(field_name: str): + def len_method(self) -> int: + return len(getattr(self, field_name)) + + def iter_method(self): + return iter(getattr(self, field_name)) + + def getitem_method(self, idx): + return getattr(self, field_name)[idx] + + return len_method, iter_method, getitem_method + + +class DuplicateExtension(Exception): + def __init__(self, msg: str, oid: ObjectIdentifier) -> None: + super().__init__(msg) + self.oid = oid + + +class ExtensionNotFound(Exception): + def __init__(self, msg: str, oid: ObjectIdentifier) -> None: + super().__init__(msg) + self.oid = oid + + +class ExtensionType(metaclass=abc.ABCMeta): + oid: typing.ClassVar[ObjectIdentifier] + + def public_bytes(self) -> bytes: + """ + Serializes the extension type to DER. + """ + raise NotImplementedError( + f"public_bytes is not implemented for extension type {self!r}" + ) + + +class Extensions: + def __init__(self, extensions: Iterable[Extension[ExtensionType]]) -> None: + self._extensions = list(extensions) + + def get_extension_for_oid( + self, oid: ObjectIdentifier + ) -> Extension[ExtensionType]: + for ext in self: + if ext.oid == oid: + return ext + + raise ExtensionNotFound(f"No {oid} extension was found", oid) + + def get_extension_for_class( + self, extclass: type[ExtensionTypeVar] + ) -> Extension[ExtensionTypeVar]: + if extclass is UnrecognizedExtension: + raise TypeError( + "UnrecognizedExtension can't be used with " + "get_extension_for_class because more than one instance of the" + " class may be present." 
+ ) + + for ext in self: + if isinstance(ext.value, extclass): + return ext + + raise ExtensionNotFound( + f"No {extclass} extension was found", extclass.oid + ) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_extensions") + + def __repr__(self) -> str: + return f"<Extensions({self._extensions})>" + + +class CRLNumber(ExtensionType): + oid = ExtensionOID.CRL_NUMBER + + def __init__(self, crl_number: int) -> None: + if not isinstance(crl_number, int): + raise TypeError("crl_number must be an integer") + + self._crl_number = crl_number + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CRLNumber): + return NotImplemented + + return self.crl_number == other.crl_number + + def __hash__(self) -> int: + return hash(self.crl_number) + + def __repr__(self) -> str: + return f"<CRLNumber({self.crl_number})>" + + @property + def crl_number(self) -> int: + return self._crl_number + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class AuthorityKeyIdentifier(ExtensionType): + oid = ExtensionOID.AUTHORITY_KEY_IDENTIFIER + + def __init__( + self, + key_identifier: bytes | None, + authority_cert_issuer: Iterable[GeneralName] | None, + authority_cert_serial_number: int | None, + ) -> None: + if (authority_cert_issuer is None) != ( + authority_cert_serial_number is None + ): + raise ValueError( + "authority_cert_issuer and authority_cert_serial_number " + "must both be present or both None" + ) + + if authority_cert_issuer is not None: + authority_cert_issuer = list(authority_cert_issuer) + if not all( + isinstance(x, GeneralName) for x in authority_cert_issuer + ): + raise TypeError( + "authority_cert_issuer must be a list of GeneralName " + "objects" + ) + + if authority_cert_serial_number is not None and not isinstance( + authority_cert_serial_number, int + ): + raise TypeError("authority_cert_serial_number must be an integer") + + self._key_identifier = key_identifier + self._authority_cert_issuer = authority_cert_issuer + self._authority_cert_serial_number = authority_cert_serial_number + + # This takes a subset of CertificatePublicKeyTypes because an issuer + # cannot have an X25519/X448 key. This introduces some unfortunate + # asymmetry that requires typing users to explicitly + # narrow their type, but we should make this accurate and not just + # convenient. 
+ @classmethod + def from_issuer_public_key( + cls, public_key: CertificateIssuerPublicKeyTypes + ) -> AuthorityKeyIdentifier: + digest = _key_identifier_from_public_key(public_key) + return cls( + key_identifier=digest, + authority_cert_issuer=None, + authority_cert_serial_number=None, + ) + + @classmethod + def from_issuer_subject_key_identifier( + cls, ski: SubjectKeyIdentifier + ) -> AuthorityKeyIdentifier: + return cls( + key_identifier=ski.digest, + authority_cert_issuer=None, + authority_cert_serial_number=None, + ) + + def __repr__(self) -> str: + return ( + f"<AuthorityKeyIdentifier(key_identifier={self.key_identifier!r}, " + f"authority_cert_issuer={self.authority_cert_issuer}, " + f"authority_cert_serial_number={self.authority_cert_serial_number}" + f")>" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, AuthorityKeyIdentifier): + return NotImplemented + + return ( + self.key_identifier == other.key_identifier + and self.authority_cert_issuer == other.authority_cert_issuer + and self.authority_cert_serial_number + == other.authority_cert_serial_number + ) + + def __hash__(self) -> int: + if self.authority_cert_issuer is None: + aci = None + else: + aci = tuple(self.authority_cert_issuer) + return hash( + (self.key_identifier, aci, self.authority_cert_serial_number) + ) + + @property + def key_identifier(self) -> bytes | None: + return self._key_identifier + + @property + def authority_cert_issuer( + self, + ) -> list[GeneralName] | None: + return self._authority_cert_issuer + + @property + def authority_cert_serial_number(self) -> int | None: + return self._authority_cert_serial_number + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class SubjectKeyIdentifier(ExtensionType): + oid = ExtensionOID.SUBJECT_KEY_IDENTIFIER + + def __init__(self, digest: bytes) -> None: + self._digest = digest + + @classmethod + def from_public_key( + cls, public_key: CertificatePublicKeyTypes + ) -> SubjectKeyIdentifier: + return cls(_key_identifier_from_public_key(public_key)) + + @property + def digest(self) -> bytes: + return self._digest + + @property + def key_identifier(self) -> bytes: + return self._digest + + def __repr__(self) -> str: + return f"<SubjectKeyIdentifier(digest={self.digest!r})>" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, SubjectKeyIdentifier): + return NotImplemented + + return constant_time.bytes_eq(self.digest, other.digest) + + def __hash__(self) -> int: + return hash(self.digest) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class AuthorityInformationAccess(ExtensionType): + oid = ExtensionOID.AUTHORITY_INFORMATION_ACCESS + + def __init__(self, descriptions: Iterable[AccessDescription]) -> None: + descriptions = list(descriptions) + if not all(isinstance(x, AccessDescription) for x in descriptions): + raise TypeError( + "Every item in the descriptions list must be an " + "AccessDescription" + ) + + self._descriptions = descriptions + + __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions") + + def __repr__(self) -> str: + return f"<AuthorityInformationAccess({self._descriptions})>" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, AuthorityInformationAccess): + return NotImplemented + + return self._descriptions == other._descriptions + + def __hash__(self) -> int: + return hash(tuple(self._descriptions)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class SubjectInformationAccess(ExtensionType): + oid = ExtensionOID.SUBJECT_INFORMATION_ACCESS + + def __init__(self, descriptions: Iterable[AccessDescription]) -> None: + descriptions = list(descriptions) + if not all(isinstance(x, AccessDescription) for x in descriptions): + raise TypeError( 
"Every item in the descriptions list must be an " + "AccessDescription" + ) + + self._descriptions = descriptions + + __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions") + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, SubjectInformationAccess): + return NotImplemented + + return self._descriptions == other._descriptions + + def __hash__(self) -> int: + return hash(tuple(self._descriptions)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class AccessDescription: + def __init__( + self, access_method: ObjectIdentifier, access_location: GeneralName + ) -> None: + if not isinstance(access_method, ObjectIdentifier): + raise TypeError("access_method must be an ObjectIdentifier") + + if not isinstance(access_location, GeneralName): + raise TypeError("access_location must be a GeneralName") + + self._access_method = access_method + self._access_location = access_location + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, AccessDescription): + return NotImplemented + + return ( + self.access_method == other.access_method + and self.access_location == other.access_location + ) + + def __hash__(self) -> int: + return hash((self.access_method, self.access_location)) + + @property + def access_method(self) -> ObjectIdentifier: + return self._access_method + + @property + def access_location(self) -> GeneralName: + return self._access_location + + +class BasicConstraints(ExtensionType): + oid = ExtensionOID.BASIC_CONSTRAINTS + + def __init__(self, ca: bool, path_length: int | None) -> None: + if not isinstance(ca, bool): + raise TypeError("ca must be a boolean value") + + if path_length is not None and not ca: + raise ValueError("path_length must be None when ca is False") + + if path_length is not None and ( + not isinstance(path_length, int) or path_length < 0 + ): + raise TypeError( + "path_length must be a non-negative integer or None" + ) + + self._ca = ca + self._path_length = path_length + + @property + def ca(self) -> bool: + return self._ca + + @property + def path_length(self) -> int | None: + return self._path_length + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, BasicConstraints): + return NotImplemented + + return self.ca == other.ca and self.path_length == other.path_length + + def __hash__(self) -> int: + return hash((self.ca, self.path_length)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class DeltaCRLIndicator(ExtensionType): + oid = ExtensionOID.DELTA_CRL_INDICATOR + + def __init__(self, crl_number: int) -> None: + if not isinstance(crl_number, int): + raise TypeError("crl_number must be an integer") + + self._crl_number = crl_number + + @property + def crl_number(self) -> int: + return self._crl_number + + def __eq__(self, other: object) -> bool: + if not isinstance(other, DeltaCRLIndicator): + return NotImplemented + + return self.crl_number == other.crl_number + + def __hash__(self) -> int: + return hash(self.crl_number) + + def __repr__(self) -> str: + return f"" + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class CRLDistributionPoints(ExtensionType): + oid = ExtensionOID.CRL_DISTRIBUTION_POINTS + + def __init__( + self, distribution_points: Iterable[DistributionPoint] + ) -> None: + distribution_points = 
list(distribution_points) + if not all( + isinstance(x, DistributionPoint) for x in distribution_points + ): + raise TypeError( + "distribution_points must be a list of DistributionPoint " + "objects" + ) + + self._distribution_points = distribution_points + + __len__, __iter__, __getitem__ = _make_sequence_methods( + "_distribution_points" + ) + + def __repr__(self) -> str: + return f"<CRLDistributionPoints({self._distribution_points})>" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CRLDistributionPoints): + return NotImplemented + + return self._distribution_points == other._distribution_points + + def __hash__(self) -> int: + return hash(tuple(self._distribution_points)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class FreshestCRL(ExtensionType): + oid = ExtensionOID.FRESHEST_CRL + + def __init__( + self, distribution_points: Iterable[DistributionPoint] + ) -> None: + distribution_points = list(distribution_points) + if not all( + isinstance(x, DistributionPoint) for x in distribution_points + ): + raise TypeError( + "distribution_points must be a list of DistributionPoint " + "objects" + ) + + self._distribution_points = distribution_points + + __len__, __iter__, __getitem__ = _make_sequence_methods( + "_distribution_points" + ) + + def __repr__(self) -> str: + return f"<FreshestCRL({self._distribution_points})>" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, FreshestCRL): + return NotImplemented + + return self._distribution_points == other._distribution_points + + def __hash__(self) -> int: + return hash(tuple(self._distribution_points)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class DistributionPoint: + def __init__( + self, + full_name: Iterable[GeneralName] | None, + relative_name: RelativeDistinguishedName | None, + reasons: frozenset[ReasonFlags] | None, + crl_issuer: Iterable[GeneralName] | None, + ) -> None: + if full_name and relative_name: + raise ValueError( + "You cannot provide both full_name and relative_name, at " + "least one must be None." + ) + if not full_name and not relative_name and not crl_issuer: + raise ValueError( + "Either full_name, relative_name or crl_issuer must be " + "provided." 
+ ) + + if full_name is not None: + full_name = list(full_name) + if not all(isinstance(x, GeneralName) for x in full_name): + raise TypeError( + "full_name must be a list of GeneralName objects" + ) + + if relative_name: + if not isinstance(relative_name, RelativeDistinguishedName): + raise TypeError( + "relative_name must be a RelativeDistinguishedName" + ) + + if crl_issuer is not None: + crl_issuer = list(crl_issuer) + if not all(isinstance(x, GeneralName) for x in crl_issuer): + raise TypeError( + "crl_issuer must be None or a list of general names" + ) + + if reasons and ( + not isinstance(reasons, frozenset) + or not all(isinstance(x, ReasonFlags) for x in reasons) + ): + raise TypeError("reasons must be None or frozenset of ReasonFlags") + + if reasons and ( + ReasonFlags.unspecified in reasons + or ReasonFlags.remove_from_crl in reasons + ): + raise ValueError( + "unspecified and remove_from_crl are not valid reasons in a " + "DistributionPoint" + ) + + self._full_name = full_name + self._relative_name = relative_name + self._reasons = reasons + self._crl_issuer = crl_issuer + + def __repr__(self) -> str: + return ( + "<DistributionPoint(full_name={0.full_name}, relative_name={0." + "relative_name}, reasons={0.reasons}, " + "crl_issuer={0.crl_issuer})>".format(self) + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, DistributionPoint): + return NotImplemented + + return ( + self.full_name == other.full_name + and self.relative_name == other.relative_name + and self.reasons == other.reasons + and self.crl_issuer == other.crl_issuer + ) + + def __hash__(self) -> int: + if self.full_name is not None: + fn: tuple[GeneralName, ...] | None = tuple(self.full_name) + else: + fn = None + + if self.crl_issuer is not None: + crl_issuer: tuple[GeneralName, ...] | None = tuple(self.crl_issuer) + else: + crl_issuer = None + + return hash((fn, self.relative_name, self.reasons, crl_issuer)) + + @property + def full_name(self) -> list[GeneralName] | None: + return self._full_name + + @property + def relative_name(self) -> RelativeDistinguishedName | None: + return self._relative_name + + @property + def reasons(self) -> frozenset[ReasonFlags] | None: + return self._reasons + + @property + def crl_issuer(self) -> list[GeneralName] | None: + return self._crl_issuer + + +class ReasonFlags(utils.Enum): + unspecified = "unspecified" + key_compromise = "keyCompromise" + ca_compromise = "cACompromise" + affiliation_changed = "affiliationChanged" + superseded = "superseded" + cessation_of_operation = "cessationOfOperation" + certificate_hold = "certificateHold" + privilege_withdrawn = "privilegeWithdrawn" + aa_compromise = "aACompromise" + remove_from_crl = "removeFromCRL" + + +# These are distribution point bit string mappings. Not to be confused with +# CRLReason reason flags bit string mappings. 
+# ReasonFlags ::= BIT STRING {
+#      unused                  (0),
+#      keyCompromise           (1),
+#      cACompromise            (2),
+#      affiliationChanged      (3),
+#      superseded              (4),
+#      cessationOfOperation    (5),
+#      certificateHold         (6),
+#      privilegeWithdrawn      (7),
+#      aACompromise            (8) }
+_REASON_BIT_MAPPING = {
+    1: ReasonFlags.key_compromise,
+    2: ReasonFlags.ca_compromise,
+    3: ReasonFlags.affiliation_changed,
+    4: ReasonFlags.superseded,
+    5: ReasonFlags.cessation_of_operation,
+    6: ReasonFlags.certificate_hold,
+    7: ReasonFlags.privilege_withdrawn,
+    8: ReasonFlags.aa_compromise,
+}
+
+_CRLREASONFLAGS = {
+    ReasonFlags.key_compromise: 1,
+    ReasonFlags.ca_compromise: 2,
+    ReasonFlags.affiliation_changed: 3,
+    ReasonFlags.superseded: 4,
+    ReasonFlags.cessation_of_operation: 5,
+    ReasonFlags.certificate_hold: 6,
+    ReasonFlags.privilege_withdrawn: 7,
+    ReasonFlags.aa_compromise: 8,
+}
+
+# CRLReason ::= ENUMERATED {
+#     unspecified             (0),
+#     keyCompromise           (1),
+#     cACompromise            (2),
+#     affiliationChanged      (3),
+#     superseded              (4),
+#     cessationOfOperation    (5),
+#     certificateHold         (6),
+#     -- value 7 is not used
+#     removeFromCRL           (8),
+#     privilegeWithdrawn      (9),
+#     aACompromise           (10) }
+_CRL_ENTRY_REASON_ENUM_TO_CODE = {
+    ReasonFlags.unspecified: 0,
+    ReasonFlags.key_compromise: 1,
+    ReasonFlags.ca_compromise: 2,
+    ReasonFlags.affiliation_changed: 3,
+    ReasonFlags.superseded: 4,
+    ReasonFlags.cessation_of_operation: 5,
+    ReasonFlags.certificate_hold: 6,
+    ReasonFlags.remove_from_crl: 8,
+    ReasonFlags.privilege_withdrawn: 9,
+    ReasonFlags.aa_compromise: 10,
+}
+
+
+class PolicyConstraints(ExtensionType):
+    oid = ExtensionOID.POLICY_CONSTRAINTS
+
+    def __init__(
+        self,
+        require_explicit_policy: int | None,
+        inhibit_policy_mapping: int | None,
+    ) -> None:
+        if require_explicit_policy is not None and not isinstance(
+            require_explicit_policy, int
+        ):
+            raise TypeError(
+                "require_explicit_policy must be a non-negative integer or "
+                "None"
+            )
+
+        if inhibit_policy_mapping is not None and not isinstance(
+            inhibit_policy_mapping, int
+        ):
+            raise TypeError(
+                "inhibit_policy_mapping must be a non-negative integer or None"
+            )
+
+        if inhibit_policy_mapping is None and require_explicit_policy is None:
+            raise ValueError(
+                "At least one of require_explicit_policy and "
+                "inhibit_policy_mapping must not be None"
+            )
+
+        self._require_explicit_policy = require_explicit_policy
+        self._inhibit_policy_mapping = inhibit_policy_mapping
+
+    def __repr__(self) -> str:
+        return (
+            "<PolicyConstraints(require_explicit_policy="
+            "{0.require_explicit_policy}, inhibit_policy_mapping="
+            "{0.inhibit_policy_mapping})>".format(self)
+        )
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, PolicyConstraints):
+            return NotImplemented
+
+        return (
+            self.require_explicit_policy == other.require_explicit_policy
+            and self.inhibit_policy_mapping == other.inhibit_policy_mapping
+        )
+
+    def __hash__(self) -> int:
+        return hash(
+            (self.require_explicit_policy, self.inhibit_policy_mapping)
+        )
+
+    @property
+    def require_explicit_policy(self) -> int | None:
+        return self._require_explicit_policy
+
+    @property
+    def inhibit_policy_mapping(self) -> int | None:
+        return self._inhibit_policy_mapping
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
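+
+# Usage sketch (illustrative, not upstream code): both parameters are skip
+# counts per RFC 5280 section 4.2.1.11, and at least one must be non-None.
+#
+#     from cryptography import x509
+#
+#     pc = x509.PolicyConstraints(
+#         require_explicit_policy=0,   # explicit policy required immediately
+#         inhibit_policy_mapping=None,
+#     )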
+
+
+class CertificatePolicies(ExtensionType):
+    oid = ExtensionOID.CERTIFICATE_POLICIES
+
+    def __init__(self, policies: Iterable[PolicyInformation]) -> None:
+        policies = list(policies)
+        if not all(isinstance(x, PolicyInformation) for x in policies):
+            raise TypeError(
+                "Every item in the policies list must be a PolicyInformation"
+            )
+
+        self._policies = policies
+
+    __len__, __iter__, __getitem__ = _make_sequence_methods("_policies")
+
+    def __repr__(self) -> str:
+        return f"<CertificatePolicies({self._policies})>"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, CertificatePolicies):
+            return NotImplemented
+
+        return self._policies == other._policies
+
+    def __hash__(self) -> int:
+        return hash(tuple(self._policies))
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class PolicyInformation:
+    def __init__(
+        self,
+        policy_identifier: ObjectIdentifier,
+        policy_qualifiers: Iterable[str | UserNotice] | None,
+    ) -> None:
+        if not isinstance(policy_identifier, ObjectIdentifier):
+            raise TypeError("policy_identifier must be an ObjectIdentifier")
+
+        self._policy_identifier = policy_identifier
+
+        if policy_qualifiers is not None:
+            policy_qualifiers = list(policy_qualifiers)
+            if not all(
+                isinstance(x, (str, UserNotice)) for x in policy_qualifiers
+            ):
+                raise TypeError(
+                    "policy_qualifiers must be a list of strings and/or "
+                    "UserNotice objects or None"
+                )
+
+        self._policy_qualifiers = policy_qualifiers
+
+    def __repr__(self) -> str:
+        return (
+            f"<PolicyInformation(policy_identifier={self.policy_identifier}, "
+            f"policy_qualifiers={self.policy_qualifiers})>"
+        )
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, PolicyInformation):
+            return NotImplemented
+
+        return (
+            self.policy_identifier == other.policy_identifier
+            and self.policy_qualifiers == other.policy_qualifiers
+        )
+
+    def __hash__(self) -> int:
+        if self.policy_qualifiers is not None:
+            pq = tuple(self.policy_qualifiers)
+        else:
+            pq = None
+
+        return hash((self.policy_identifier, pq))
+
+    @property
+    def policy_identifier(self) -> ObjectIdentifier:
+        return self._policy_identifier
+
+    @property
+    def policy_qualifiers(
+        self,
+    ) -> list[str | UserNotice] | None:
+        return self._policy_qualifiers
+
+
+class UserNotice:
+    def __init__(
+        self,
+        notice_reference: NoticeReference | None,
+        explicit_text: str | None,
+    ) -> None:
+        if notice_reference and not isinstance(
+            notice_reference, NoticeReference
+        ):
+            raise TypeError(
+                "notice_reference must be None or a NoticeReference"
+            )
+
+        self._notice_reference = notice_reference
+        self._explicit_text = explicit_text
+
+    def __repr__(self) -> str:
+        return (
+            f"<UserNotice(notice_reference={self.notice_reference}, "
+            f"explicit_text={self.explicit_text!r})>"
+        )
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, UserNotice):
+            return NotImplemented
+
+        return (
+            self.notice_reference == other.notice_reference
+            and self.explicit_text == other.explicit_text
+        )
+
+    def __hash__(self) -> int:
+        return hash((self.notice_reference, self.explicit_text))
+
+    @property
+    def notice_reference(self) -> NoticeReference | None:
+        return self._notice_reference
+
+    @property
+    def explicit_text(self) -> str | None:
+        return self._explicit_text
+
+
+class NoticeReference:
+    def __init__(
+        self,
+        organization: str | None,
+        notice_numbers: Iterable[int],
+    ) -> None:
+        self._organization = organization
+        notice_numbers = list(notice_numbers)
+        if not all(isinstance(x, int) for x in notice_numbers):
+            raise TypeError("notice_numbers must be a list of integers")
+
+        self._notice_numbers = notice_numbers
+
+    def __repr__(self) -> str:
+        return (
+            f"<NoticeReference(organization={self.organization!r}, "
+            f"notice_numbers={self.notice_numbers})>"
+        )
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, NoticeReference):
+            return NotImplemented
+
+        return (
+            self.organization == other.organization
+            and self.notice_numbers == other.notice_numbers
+        )
+
+    def __hash__(self) -> int:
+        return hash((self.organization, tuple(self.notice_numbers)))
+
+    @property
+    def organization(self) -> str | None:
+        return self._organization
+
+    @property
+    def notice_numbers(self) -> list[int]:
+        return self._notice_numbers
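+
+# Usage sketch (illustrative, not upstream code): a policy entry whose
+# qualifier is a CPS URI string; the OID shown is the CA/Browser Forum
+# domain-validated policy identifier, and the URL is illustrative.
+#
+#     from cryptography import x509
+#
+#     cp = x509.CertificatePolicies([
+#         x509.PolicyInformation(
+#             x509.ObjectIdentifier("2.23.140.1.2.1"),
+#             ["https://example.com/cps"],
+#         )
+#     ])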
+
+
+class ExtendedKeyUsage(ExtensionType):
+    oid = ExtensionOID.EXTENDED_KEY_USAGE
+
+    def __init__(self, usages: Iterable[ObjectIdentifier]) -> None:
+        usages = list(usages)
+        if not all(isinstance(x, ObjectIdentifier) for x in usages):
+            raise TypeError(
+                "Every item in the usages list must be an ObjectIdentifier"
+            )
+
+        self._usages = usages
+
+    __len__, __iter__, __getitem__ = _make_sequence_methods("_usages")
+
+    def __repr__(self) -> str:
+        return f"<ExtendedKeyUsage({self._usages})>"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, ExtendedKeyUsage):
+            return NotImplemented
+
+        return self._usages == other._usages
+
+    def __hash__(self) -> int:
+        return hash(tuple(self._usages))
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class OCSPNoCheck(ExtensionType):
+    oid = ExtensionOID.OCSP_NO_CHECK
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, OCSPNoCheck):
+            return NotImplemented
+
+        return True
+
+    def __hash__(self) -> int:
+        return hash(OCSPNoCheck)
+
+    def __repr__(self) -> str:
+        return "<OCSPNoCheck()>"
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class PrecertPoison(ExtensionType):
+    oid = ExtensionOID.PRECERT_POISON
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, PrecertPoison):
+            return NotImplemented
+
+        return True
+
+    def __hash__(self) -> int:
+        return hash(PrecertPoison)
+
+    def __repr__(self) -> str:
+        return "<PrecertPoison()>"
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class TLSFeature(ExtensionType):
+    oid = ExtensionOID.TLS_FEATURE
+
+    def __init__(self, features: Iterable[TLSFeatureType]) -> None:
+        features = list(features)
+        if (
+            not all(isinstance(x, TLSFeatureType) for x in features)
+            or len(features) == 0
+        ):
+            raise TypeError(
+                "features must be a list of elements from the TLSFeatureType "
+                "enum"
+            )
+
+        self._features = features
+
+    __len__, __iter__, __getitem__ = _make_sequence_methods("_features")
+
+    def __repr__(self) -> str:
+        return f"<TLSFeature(features={self._features})>"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, TLSFeature):
+            return NotImplemented
+
+        return self._features == other._features
+
+    def __hash__(self) -> int:
+        return hash(tuple(self._features))
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
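+
+# Usage sketch (illustrative, not upstream code): the common "OCSP
+# Must-Staple" marker is a TLS Feature extension carrying status_request.
+#
+#     from cryptography import x509
+#
+#     must_staple = x509.TLSFeature([x509.TLSFeatureType.status_request])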
+
+
+class TLSFeatureType(utils.Enum):
+    # status_request is defined in RFC 6066 and is used for what is commonly
+    # called OCSP Must-Staple when present in the TLS Feature extension in an
+    # X.509 certificate.
+    status_request = 5
+    # status_request_v2 is defined in RFC 6961 and allows multiple OCSP
+    # responses to be provided. It is not currently in use by clients or
+    # servers.
+    status_request_v2 = 17
+
+
+_TLS_FEATURE_TYPE_TO_ENUM = {x.value: x for x in TLSFeatureType}
+
+
+class InhibitAnyPolicy(ExtensionType):
+    oid = ExtensionOID.INHIBIT_ANY_POLICY
+
+    def __init__(self, skip_certs: int) -> None:
+        if not isinstance(skip_certs, int):
+            raise TypeError("skip_certs must be an integer")
+
+        if skip_certs < 0:
+            raise ValueError("skip_certs must be a non-negative integer")
+
+        self._skip_certs = skip_certs
+
+    def __repr__(self) -> str:
+        return f"<InhibitAnyPolicy(skip_certs={self.skip_certs})>"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, InhibitAnyPolicy):
+            return NotImplemented
+
+        return self.skip_certs == other.skip_certs
+
+    def __hash__(self) -> int:
+        return hash(self.skip_certs)
+
+    @property
+    def skip_certs(self) -> int:
+        return self._skip_certs
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class KeyUsage(ExtensionType):
+    oid = ExtensionOID.KEY_USAGE
+
+    def __init__(
+        self,
+        digital_signature: bool,
+        content_commitment: bool,
+        key_encipherment: bool,
+        data_encipherment: bool,
+        key_agreement: bool,
+        key_cert_sign: bool,
+        crl_sign: bool,
+        encipher_only: bool,
+        decipher_only: bool,
+    ) -> None:
+        if not key_agreement and (encipher_only or decipher_only):
+            raise ValueError(
+                "encipher_only and decipher_only can only be true when "
+                "key_agreement is true"
+            )
+
+        self._digital_signature = digital_signature
+        self._content_commitment = content_commitment
+        self._key_encipherment = key_encipherment
+        self._data_encipherment = data_encipherment
+        self._key_agreement = key_agreement
+        self._key_cert_sign = key_cert_sign
+        self._crl_sign = crl_sign
+        self._encipher_only = encipher_only
+        self._decipher_only = decipher_only
+
+    @property
+    def digital_signature(self) -> bool:
+        return self._digital_signature
+
+    @property
+    def content_commitment(self) -> bool:
+        return self._content_commitment
+
+    @property
+    def key_encipherment(self) -> bool:
+        return self._key_encipherment
+
+    @property
+    def data_encipherment(self) -> bool:
+        return self._data_encipherment
+
+    @property
+    def key_agreement(self) -> bool:
+        return self._key_agreement
+
+    @property
+    def key_cert_sign(self) -> bool:
+        return self._key_cert_sign
+
+    @property
+    def crl_sign(self) -> bool:
+        return self._crl_sign
+
+    @property
+    def encipher_only(self) -> bool:
+        if not self.key_agreement:
+            raise ValueError(
+                "encipher_only is undefined unless key_agreement is true"
+            )
+        else:
+            return self._encipher_only
+
+    @property
+    def decipher_only(self) -> bool:
+        if not self.key_agreement:
+            raise ValueError(
+                "decipher_only is undefined unless key_agreement is true"
+            )
+        else:
+            return self._decipher_only
+
+    def __repr__(self) -> str:
+        try:
+            encipher_only = self.encipher_only
+            decipher_only = self.decipher_only
+        except ValueError:
+            # Users found None confusing because even though encipher/decipher
+            # have no meaning unless key_agreement is true, to construct an
+            # instance of the class you still need to pass False.
+            encipher_only = False
+            decipher_only = False
+
+        return (
+            f"<KeyUsage(digital_signature={self.digital_signature}, "
+            f"content_commitment={self.content_commitment}, "
+            f"key_encipherment={self.key_encipherment}, "
+            f"data_encipherment={self.data_encipherment}, "
+            f"key_agreement={self.key_agreement}, "
+            f"key_cert_sign={self.key_cert_sign}, "
+            f"crl_sign={self.crl_sign}, "
+            f"encipher_only={encipher_only}, "
+            f"decipher_only={decipher_only})>"
+        )
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, KeyUsage):
+            return NotImplemented
+
+        return (
+            self.digital_signature == other.digital_signature
+            and self.content_commitment == other.content_commitment
+            and self.key_encipherment == other.key_encipherment
+            and self.data_encipherment == other.data_encipherment
+            and self.key_agreement == other.key_agreement
+            and self.key_cert_sign == other.key_cert_sign
+            and self.crl_sign == other.crl_sign
+            and self._encipher_only == other._encipher_only
+            and self._decipher_only == other._decipher_only
+        )
+
+    def __hash__(self) -> int:
+        return hash(
+            (
+                self.digital_signature,
+                self.content_commitment,
+                self.key_encipherment,
+                self.data_encipherment,
+                self.key_agreement,
+                self.key_cert_sign,
+                self.crl_sign,
+                self._encipher_only,
+                self._decipher_only,
+            )
+        )
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class PrivateKeyUsagePeriod(ExtensionType):
+    oid = ExtensionOID.PRIVATE_KEY_USAGE_PERIOD
+
+    def __init__(
+        self,
+        not_before: datetime.datetime | None,
+        not_after: datetime.datetime | None,
+    ) -> None:
+        if (
+            not isinstance(not_before, datetime.datetime)
+            and not_before is not None
+        ):
+            raise TypeError("not_before must be a datetime.datetime or None")
+
+        if (
+            not isinstance(not_after, datetime.datetime)
+            and not_after is not None
+        ):
+            raise TypeError("not_after must be a datetime.datetime or None")
+
+        if not_before is None and not_after is None:
+            raise ValueError(
+                "At least one of not_before and not_after must not be None"
+            )
+
+        if (
+            not_before is not None
+            and not_after is not None
+            and not_before > not_after
+        ):
+            raise ValueError("not_before must be before not_after")
+
+        self._not_before = not_before
+        self._not_after = not_after
+
+    @property
+    def not_before(self) -> datetime.datetime | None:
+        return self._not_before
+
+    @property
+    def not_after(self) -> datetime.datetime | None:
+        return self._not_after
+
+    def __repr__(self) -> str:
+        return (
+            f"<PrivateKeyUsagePeriod(not_before={self.not_before}, "
+            f"not_after={self.not_after})>"
+        )
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, PrivateKeyUsagePeriod):
+            return NotImplemented
+
+        return (
+            self.not_before == other.not_before
+            and self.not_after == other.not_after
+        )
+
+    def __hash__(self) -> int:
+        return hash((self.not_before, self.not_after))
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
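+
+# Usage sketch (illustrative, not upstream code): a typical key usage for a
+# TLS server certificate with an RSA key. Note that reading encipher_only or
+# decipher_only raises ValueError while key_agreement is False.
+#
+#     from cryptography import x509
+#
+#     ku = x509.KeyUsage(
+#         digital_signature=True,
+#         content_commitment=False,
+#         key_encipherment=True,
+#         data_encipherment=False,
+#         key_agreement=False,
+#         key_cert_sign=False,
+#         crl_sign=False,
+#         encipher_only=False,
+#         decipher_only=False,
+#     )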
+
+
+class NameConstraints(ExtensionType):
+    oid = ExtensionOID.NAME_CONSTRAINTS
+
+    def __init__(
+        self,
+        permitted_subtrees: Iterable[GeneralName] | None,
+        excluded_subtrees: Iterable[GeneralName] | None,
+    ) -> None:
+        if permitted_subtrees is not None:
+            permitted_subtrees = list(permitted_subtrees)
+            if not permitted_subtrees:
+                raise ValueError(
+                    "permitted_subtrees must be a non-empty list or None"
+                )
+            if not all(isinstance(x, GeneralName) for x in permitted_subtrees):
+                raise TypeError(
+                    "permitted_subtrees must be a list of GeneralName objects "
+                    "or None"
+                )
+
+            self._validate_tree(permitted_subtrees)
+
+        if excluded_subtrees is not None:
+            excluded_subtrees = list(excluded_subtrees)
+            if not excluded_subtrees:
+                raise ValueError(
+                    "excluded_subtrees must be a non-empty list or None"
+                )
+            if not all(isinstance(x, GeneralName) for x in excluded_subtrees):
+                raise TypeError(
+                    "excluded_subtrees must be a list of GeneralName objects "
+                    "or None"
+                )
+
+            self._validate_tree(excluded_subtrees)
+
+        if permitted_subtrees is None and excluded_subtrees is None:
+            raise ValueError(
+                "At least one of permitted_subtrees and excluded_subtrees "
+                "must not be None"
+            )
+
+        self._permitted_subtrees = permitted_subtrees
+        self._excluded_subtrees = excluded_subtrees
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, NameConstraints):
+            return NotImplemented
+
+        return (
+            self.excluded_subtrees == other.excluded_subtrees
+            and self.permitted_subtrees == other.permitted_subtrees
+        )
+
+    def _validate_tree(self, tree: Iterable[GeneralName]) -> None:
+        self._validate_ip_name(tree)
+        self._validate_dns_name(tree)
+
+    def _validate_ip_name(self, tree: Iterable[GeneralName]) -> None:
+        if any(
+            isinstance(name, IPAddress)
+            and not isinstance(
+                name.value, (ipaddress.IPv4Network, ipaddress.IPv6Network)
+            )
+            for name in tree
+        ):
+            raise TypeError(
+                "IPAddress name constraints must be an IPv4Network or"
+                " IPv6Network object"
+            )
+
+    def _validate_dns_name(self, tree: Iterable[GeneralName]) -> None:
+        if any(
+            isinstance(name, DNSName) and "*" in name.value for name in tree
+        ):
+            raise ValueError(
+                "DNSName name constraints must not contain the '*' wildcard"
+                " character"
+            )
+
+    def __repr__(self) -> str:
+        return (
+            f"<NameConstraints(permitted_subtrees={self.permitted_subtrees}, "
+            f"excluded_subtrees={self.excluded_subtrees})>"
+        )
+
+    def __hash__(self) -> int:
+        if self.permitted_subtrees is not None:
+            ps: tuple[GeneralName, ...] | None = tuple(self.permitted_subtrees)
+        else:
+            ps = None
+
+        if self.excluded_subtrees is not None:
+            es: tuple[GeneralName, ...] | None = tuple(self.excluded_subtrees)
+        else:
+            es = None
+
+        return hash((ps, es))
+
+    @property
+    def permitted_subtrees(
+        self,
+    ) -> list[GeneralName] | None:
+        return self._permitted_subtrees
+
+    @property
+    def excluded_subtrees(
+        self,
+    ) -> list[GeneralName] | None:
+        return self._excluded_subtrees
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class Extension(typing.Generic[ExtensionTypeVar]):
+    def __init__(
+        self, oid: ObjectIdentifier, critical: bool, value: ExtensionTypeVar
+    ) -> None:
+        if not isinstance(oid, ObjectIdentifier):
+            raise TypeError(
+                "oid argument must be an ObjectIdentifier instance."
+            )
+
+        if not isinstance(critical, bool):
+            raise TypeError("critical must be a boolean value")
+
+        self._oid = oid
+        self._critical = critical
+        self._value = value
+
+    @property
+    def oid(self) -> ObjectIdentifier:
+        return self._oid
+
+    @property
+    def critical(self) -> bool:
+        return self._critical
+
+    @property
+    def value(self) -> ExtensionTypeVar:
+        return self._value
+
+    def __repr__(self) -> str:
+        return (
+            f"<Extension(oid={self.oid}, critical={self.critical}, "
+            f"value={self.value})>"
+        )
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Extension):
+            return NotImplemented
+
+        return (
+            self.oid == other.oid
+            and self.critical == other.critical
+            and self.value == other.value
+        )
+
+    def __hash__(self) -> int:
+        return hash((self.oid, self.critical, self.value))
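+
+# Usage sketch (illustrative, not upstream code): wrapping an extension value
+# in the generic Extension container. RFC 5280 requires conforming CAs to mark
+# name constraints critical, hence critical=True.
+#
+#     from cryptography import x509
+#     from cryptography.x509.oid import ExtensionOID
+#
+#     nc = x509.NameConstraints(
+#         permitted_subtrees=[x509.DNSName("example.com")],
+#         excluded_subtrees=None,
+#     )
+#     ext = x509.Extension(ExtensionOID.NAME_CONSTRAINTS, True, nc)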
+
+
+class GeneralNames:
+    def __init__(self, general_names: Iterable[GeneralName]) -> None:
+        general_names = list(general_names)
+        if not all(isinstance(x, GeneralName) for x in general_names):
+            raise TypeError(
+                "Every item in the general_names list must be an "
+                "object conforming to the GeneralName interface"
+            )
+
+        self._general_names = general_names
+
+    __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: type[DNSName]
+        | type[UniformResourceIdentifier]
+        | type[RFC822Name],
+    ) -> list[str]: ...
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: type[DirectoryName],
+    ) -> list[Name]: ...
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: type[RegisteredID],
+    ) -> list[ObjectIdentifier]: ...
+
+    @typing.overload
+    def get_values_for_type(
+        self, type: type[IPAddress]
+    ) -> list[_IPAddressTypes]: ...
+
+    @typing.overload
+    def get_values_for_type(
+        self, type: type[OtherName]
+    ) -> list[OtherName]: ...
+
+    def get_values_for_type(
+        self,
+        type: type[DNSName]
+        | type[DirectoryName]
+        | type[IPAddress]
+        | type[OtherName]
+        | type[RFC822Name]
+        | type[RegisteredID]
+        | type[UniformResourceIdentifier],
+    ) -> (
+        list[_IPAddressTypes]
+        | list[str]
+        | list[OtherName]
+        | list[Name]
+        | list[ObjectIdentifier]
+    ):
+        # Return the value of each GeneralName, except for OtherName instances
+        # which we return directly because it has two important properties not
+        # just one value.
+        objs = (i for i in self if isinstance(i, type))
+        if type != OtherName:
+            return [i.value for i in objs]
+        return list(objs)
+
+    def __repr__(self) -> str:
+        return f"<GeneralNames({self._general_names})>"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, GeneralNames):
+            return NotImplemented
+
+        return self._general_names == other._general_names
+
+    def __hash__(self) -> int:
+        return hash(tuple(self._general_names))
+
+
+class SubjectAlternativeName(ExtensionType):
+    oid = ExtensionOID.SUBJECT_ALTERNATIVE_NAME
+
+    def __init__(self, general_names: Iterable[GeneralName]) -> None:
+        self._general_names = GeneralNames(general_names)
+
+    __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: type[DNSName]
+        | type[UniformResourceIdentifier]
+        | type[RFC822Name],
+    ) -> list[str]: ...
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: type[DirectoryName],
+    ) -> list[Name]: ...
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: type[RegisteredID],
+    ) -> list[ObjectIdentifier]: ...
+
+    @typing.overload
+    def get_values_for_type(
+        self, type: type[IPAddress]
+    ) -> list[_IPAddressTypes]: ...
+
+    @typing.overload
+    def get_values_for_type(
+        self, type: type[OtherName]
+    ) -> list[OtherName]: ...
+
+    def get_values_for_type(
+        self,
+        type: type[DNSName]
+        | type[DirectoryName]
+        | type[IPAddress]
+        | type[OtherName]
+        | type[RFC822Name]
+        | type[RegisteredID]
+        | type[UniformResourceIdentifier],
+    ) -> (
+        list[_IPAddressTypes]
+        | list[str]
+        | list[OtherName]
+        | list[Name]
+        | list[ObjectIdentifier]
+    ):
+        return self._general_names.get_values_for_type(type)
+
+    def __repr__(self) -> str:
+        return f"<SubjectAlternativeName({self._general_names})>"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, SubjectAlternativeName):
+            return NotImplemented
+
+        return self._general_names == other._general_names
+
+    def __hash__(self) -> int:
+        return hash(self._general_names)
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
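+
+# Usage sketch (illustrative, not upstream code): get_values_for_type filters
+# by GeneralName subtype and unwraps each name's .value.
+#
+#     import ipaddress
+#     from cryptography import x509
+#
+#     san = x509.SubjectAlternativeName([
+#         x509.DNSName("example.com"),
+#         x509.IPAddress(ipaddress.IPv4Address("203.0.113.7")),
+#     ])
+#     san.get_values_for_type(x509.DNSName)  # ["example.com"]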
+
+
+class IssuerAlternativeName(ExtensionType):
+    oid = ExtensionOID.ISSUER_ALTERNATIVE_NAME
+
+    def __init__(self, general_names: Iterable[GeneralName]) -> None:
+        self._general_names = GeneralNames(general_names)
+
+    __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: type[DNSName]
+        | type[UniformResourceIdentifier]
+        | type[RFC822Name],
+    ) -> list[str]: ...
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: type[DirectoryName],
+    ) -> list[Name]: ...
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: type[RegisteredID],
+    ) -> list[ObjectIdentifier]: ...
+
+    @typing.overload
+    def get_values_for_type(
+        self, type: type[IPAddress]
+    ) -> list[_IPAddressTypes]: ...
+
+    @typing.overload
+    def get_values_for_type(
+        self, type: type[OtherName]
+    ) -> list[OtherName]: ...
+
+    def get_values_for_type(
+        self,
+        type: type[DNSName]
+        | type[DirectoryName]
+        | type[IPAddress]
+        | type[OtherName]
+        | type[RFC822Name]
+        | type[RegisteredID]
+        | type[UniformResourceIdentifier],
+    ) -> (
+        list[_IPAddressTypes]
+        | list[str]
+        | list[OtherName]
+        | list[Name]
+        | list[ObjectIdentifier]
+    ):
+        return self._general_names.get_values_for_type(type)
+
+    def __repr__(self) -> str:
+        return f"<IssuerAlternativeName({self._general_names})>"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, IssuerAlternativeName):
+            return NotImplemented
+
+        return self._general_names == other._general_names
+
+    def __hash__(self) -> int:
+        return hash(self._general_names)
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class CertificateIssuer(ExtensionType):
+    oid = CRLEntryExtensionOID.CERTIFICATE_ISSUER
+
+    def __init__(self, general_names: Iterable[GeneralName]) -> None:
+        self._general_names = GeneralNames(general_names)
+
+    __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: type[DNSName]
+        | type[UniformResourceIdentifier]
+        | type[RFC822Name],
+    ) -> list[str]: ...
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: type[DirectoryName],
+    ) -> list[Name]: ...
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: type[RegisteredID],
+    ) -> list[ObjectIdentifier]: ...
+
+    @typing.overload
+    def get_values_for_type(
+        self, type: type[IPAddress]
+    ) -> list[_IPAddressTypes]: ...
+
+    @typing.overload
+    def get_values_for_type(
+        self, type: type[OtherName]
+    ) -> list[OtherName]: ...
+
+    def get_values_for_type(
+        self,
+        type: type[DNSName]
+        | type[DirectoryName]
+        | type[IPAddress]
+        | type[OtherName]
+        | type[RFC822Name]
+        | type[RegisteredID]
+        | type[UniformResourceIdentifier],
+    ) -> (
+        list[_IPAddressTypes]
+        | list[str]
+        | list[OtherName]
+        | list[Name]
+        | list[ObjectIdentifier]
+    ):
+        return self._general_names.get_values_for_type(type)
+
+    def __repr__(self) -> str:
+        return f"<CertificateIssuer({self._general_names})>"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, CertificateIssuer):
+            return NotImplemented
+
+        return self._general_names == other._general_names
+
+    def __hash__(self) -> int:
+        return hash(self._general_names)
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class CRLReason(ExtensionType):
+    oid = CRLEntryExtensionOID.CRL_REASON
+
+    def __init__(self, reason: ReasonFlags) -> None:
+        if not isinstance(reason, ReasonFlags):
+            raise TypeError("reason must be an element from ReasonFlags")
+
+        self._reason = reason
+
+    def __repr__(self) -> str:
+        return f"<CRLReason(reason={self.reason})>"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, CRLReason):
+            return NotImplemented
+
+        return self.reason == other.reason
+
+    def __hash__(self) -> int:
+        return hash(self.reason)
+
+    @property
+    def reason(self) -> ReasonFlags:
+        return self._reason
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class InvalidityDate(ExtensionType):
+    oid = CRLEntryExtensionOID.INVALIDITY_DATE
+
+    def __init__(self, invalidity_date: datetime.datetime) -> None:
+        if not isinstance(invalidity_date, datetime.datetime):
+            raise TypeError("invalidity_date must be a datetime.datetime")
+
+        self._invalidity_date = invalidity_date
+
+    def __repr__(self) -> str:
+        return f"<InvalidityDate(invalidity_date={self.invalidity_date})>"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, InvalidityDate):
+            return NotImplemented
+
+        return self.invalidity_date == other.invalidity_date
+
+    def __hash__(self) -> int:
+        return hash(self.invalidity_date)
+
+    @property
+    def invalidity_date(self) -> datetime.datetime:
+        return self._invalidity_date
+
+    @property
+    def invalidity_date_utc(self) -> datetime.datetime:
+        if self._invalidity_date.tzinfo is None:
+            return self._invalidity_date.replace(tzinfo=datetime.timezone.utc)
+        else:
+            return self._invalidity_date.astimezone(tz=datetime.timezone.utc)
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
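+
+# Usage sketch (illustrative, not upstream code): a naive datetime is
+# interpreted as UTC by invalidity_date_utc, while an aware one is converted.
+#
+#     import datetime
+#     from cryptography import x509
+#
+#     inv = x509.InvalidityDate(datetime.datetime(2024, 1, 1, 12, 0))
+#     inv.invalidity_date_utc  # 2024-01-01 12:00:00+00:00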
+
+
+class PrecertificateSignedCertificateTimestamps(ExtensionType):
+    oid = ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS
+
+    def __init__(
+        self,
+        signed_certificate_timestamps: Iterable[SignedCertificateTimestamp],
+    ) -> None:
+        signed_certificate_timestamps = list(signed_certificate_timestamps)
+        if not all(
+            isinstance(sct, SignedCertificateTimestamp)
+            for sct in signed_certificate_timestamps
+        ):
+            raise TypeError(
+                "Every item in the signed_certificate_timestamps list must be "
+                "a SignedCertificateTimestamp"
+            )
+        self._signed_certificate_timestamps = signed_certificate_timestamps
+
+    __len__, __iter__, __getitem__ = _make_sequence_methods(
+        "_signed_certificate_timestamps"
+    )
+
+    def __repr__(self) -> str:
+        return f"<PrecertificateSignedCertificateTimestamps({list(self)})>"
+
+    def __hash__(self) -> int:
+        return hash(tuple(self._signed_certificate_timestamps))
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, PrecertificateSignedCertificateTimestamps):
+            return NotImplemented
+
+        return (
+            self._signed_certificate_timestamps
+            == other._signed_certificate_timestamps
+        )
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class SignedCertificateTimestamps(ExtensionType):
+    oid = ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS
+
+    def __init__(
+        self,
+        signed_certificate_timestamps: Iterable[SignedCertificateTimestamp],
+    ) -> None:
+        signed_certificate_timestamps = list(signed_certificate_timestamps)
+        if not all(
+            isinstance(sct, SignedCertificateTimestamp)
+            for sct in signed_certificate_timestamps
+        ):
+            raise TypeError(
+                "Every item in the signed_certificate_timestamps list must be "
+                "a SignedCertificateTimestamp"
+            )
+        self._signed_certificate_timestamps = signed_certificate_timestamps
+
+    __len__, __iter__, __getitem__ = _make_sequence_methods(
+        "_signed_certificate_timestamps"
+    )
+
+    def __repr__(self) -> str:
+        return f"<SignedCertificateTimestamps({list(self)})>"
+
+    def __hash__(self) -> int:
+        return hash(tuple(self._signed_certificate_timestamps))
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, SignedCertificateTimestamps):
+            return NotImplemented
+
+        return (
+            self._signed_certificate_timestamps
+            == other._signed_certificate_timestamps
+        )
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class OCSPNonce(ExtensionType):
+    oid = OCSPExtensionOID.NONCE
+
+    def __init__(self, nonce: bytes) -> None:
+        if not isinstance(nonce, bytes):
+            raise TypeError("nonce must be bytes")
+
+        self._nonce = nonce
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, OCSPNonce):
+            return NotImplemented
+
+        return self.nonce == other.nonce
+
+    def __hash__(self) -> int:
+        return hash(self.nonce)
+
+    def __repr__(self) -> str:
+        return f"<OCSPNonce(nonce={self.nonce!r})>"
+
+    @property
+    def nonce(self) -> bytes:
+        return self._nonce
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class OCSPAcceptableResponses(ExtensionType):
+    oid = OCSPExtensionOID.ACCEPTABLE_RESPONSES
+
+    def __init__(self, responses: Iterable[ObjectIdentifier]) -> None:
+        responses = list(responses)
+        if any(not isinstance(r, ObjectIdentifier) for r in responses):
+            raise TypeError("All responses must be ObjectIdentifiers")
+
+        self._responses = responses
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, OCSPAcceptableResponses):
+            return NotImplemented
+
+        return self._responses == other._responses
+
+    def __hash__(self) -> int:
+        return hash(tuple(self._responses))
+
+    def __repr__(self) -> str:
+        return f"<OCSPAcceptableResponses(responses={self._responses})>"
+
+    def __iter__(self) -> Iterator[ObjectIdentifier]:
+        return iter(self._responses)
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
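+
+# Usage sketch (illustrative, not upstream code; assumes OCSPNonce is
+# re-exported from ``cryptography.x509`` as upstream does): an OCSP nonce is
+# opaque bytes, and a random value of 16-32 bytes is typical.
+#
+#     import os
+#     from cryptography import x509
+#
+#     nonce = x509.OCSPNonce(os.urandom(16))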
"IssuingDistributionPoint" + ) + + if not ( + isinstance(only_contains_user_certs, bool) + and isinstance(only_contains_ca_certs, bool) + and isinstance(indirect_crl, bool) + and isinstance(only_contains_attribute_certs, bool) + ): + raise TypeError( + "only_contains_user_certs, only_contains_ca_certs, " + "indirect_crl and only_contains_attribute_certs " + "must all be boolean." + ) + + # Per RFC5280 Section 5.2.5, the Issuing Distribution Point extension + # in a CRL can have only one of onlyContainsUserCerts, + # onlyContainsCACerts, onlyContainsAttributeCerts set to TRUE. + crl_constraints = [ + only_contains_user_certs, + only_contains_ca_certs, + only_contains_attribute_certs, + ] + + if len([x for x in crl_constraints if x]) > 1: + raise ValueError( + "Only one of the following can be set to True: " + "only_contains_user_certs, only_contains_ca_certs, " + "only_contains_attribute_certs" + ) + + if not any( + [ + only_contains_user_certs, + only_contains_ca_certs, + indirect_crl, + only_contains_attribute_certs, + full_name, + relative_name, + only_some_reasons, + ] + ): + raise ValueError( + "Cannot create empty extension: " + "if only_contains_user_certs, only_contains_ca_certs, " + "indirect_crl, and only_contains_attribute_certs are all False" + ", then either full_name, relative_name, or only_some_reasons " + "must have a value." + ) + + self._only_contains_user_certs = only_contains_user_certs + self._only_contains_ca_certs = only_contains_ca_certs + self._indirect_crl = indirect_crl + self._only_contains_attribute_certs = only_contains_attribute_certs + self._only_some_reasons = only_some_reasons + self._full_name = full_name + self._relative_name = relative_name + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, IssuingDistributionPoint): + return NotImplemented + + return ( + self.full_name == other.full_name + and self.relative_name == other.relative_name + and self.only_contains_user_certs == other.only_contains_user_certs + and self.only_contains_ca_certs == other.only_contains_ca_certs + and self.only_some_reasons == other.only_some_reasons + and self.indirect_crl == other.indirect_crl + and self.only_contains_attribute_certs + == other.only_contains_attribute_certs + ) + + def __hash__(self) -> int: + return hash( + ( + self.full_name, + self.relative_name, + self.only_contains_user_certs, + self.only_contains_ca_certs, + self.only_some_reasons, + self.indirect_crl, + self.only_contains_attribute_certs, + ) + ) + + @property + def full_name(self) -> list[GeneralName] | None: + return self._full_name + + @property + def relative_name(self) -> RelativeDistinguishedName | None: + return self._relative_name + + @property + def only_contains_user_certs(self) -> bool: + return self._only_contains_user_certs + + @property + def only_contains_ca_certs(self) -> bool: + return self._only_contains_ca_certs + + @property + def only_some_reasons( + self, + ) -> frozenset[ReasonFlags] | None: + return self._only_some_reasons + + @property + def indirect_crl(self) -> bool: + return self._indirect_crl + + @property + def only_contains_attribute_certs(self) -> bool: + return self._only_contains_attribute_certs + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class MSCertificateTemplate(ExtensionType): + oid = ExtensionOID.MS_CERTIFICATE_TEMPLATE + + def __init__( + self, + template_id: ObjectIdentifier, + major_version: int | None, + minor_version: int | None, + ) -> None: + 
+        if not isinstance(template_id, ObjectIdentifier):
+            raise TypeError("oid must be an ObjectIdentifier")
+        self._template_id = template_id
+        if (
+            major_version is not None and not isinstance(major_version, int)
+        ) or (
+            minor_version is not None and not isinstance(minor_version, int)
+        ):
+            raise TypeError(
+                "major_version and minor_version must be integers or None"
+            )
+        self._major_version = major_version
+        self._minor_version = minor_version
+
+    @property
+    def template_id(self) -> ObjectIdentifier:
+        return self._template_id
+
+    @property
+    def major_version(self) -> int | None:
+        return self._major_version
+
+    @property
+    def minor_version(self) -> int | None:
+        return self._minor_version
+
+    def __repr__(self) -> str:
+        return (
+            f"<MSCertificateTemplate(template_id={self.template_id}, "
+            f"major_version={self.major_version}, "
+            f"minor_version={self.minor_version})>"
+        )
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, MSCertificateTemplate):
+            return NotImplemented
+
+        return (
+            self.template_id == other.template_id
+            and self.major_version == other.major_version
+            and self.minor_version == other.minor_version
+        )
+
+    def __hash__(self) -> int:
+        return hash((self.template_id, self.major_version, self.minor_version))
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class NamingAuthority:
+    def __init__(
+        self,
+        id: ObjectIdentifier | None,
+        url: str | None,
+        text: str | None,
+    ) -> None:
+        if id is not None and not isinstance(id, ObjectIdentifier):
+            raise TypeError("id must be an ObjectIdentifier")
+
+        if url is not None and not isinstance(url, str):
+            raise TypeError("url must be a str")
+
+        if text is not None and not isinstance(text, str):
+            raise TypeError("text must be a str")
+
+        self._id = id
+        self._url = url
+        self._text = text
+
+    @property
+    def id(self) -> ObjectIdentifier | None:
+        return self._id
+
+    @property
+    def url(self) -> str | None:
+        return self._url
+
+    @property
+    def text(self) -> str | None:
+        return self._text
+
+    def __repr__(self) -> str:
+        return (
+            f"<NamingAuthority(id={self.id}, url={self.url}, "
+            f"text={self.text})>"
+        )
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, NamingAuthority):
+            return NotImplemented
+
+        return (
+            self.id == other.id
+            and self.url == other.url
+            and self.text == other.text
+        )
+
+    def __hash__(self) -> int:
+        return hash(
+            (
+                self.id,
+                self.url,
+                self.text,
+            )
+        )
+
+
+class ProfessionInfo:
+    def __init__(
+        self,
+        naming_authority: NamingAuthority | None,
+        profession_items: Iterable[str],
+        profession_oids: Iterable[ObjectIdentifier] | None,
+        registration_number: str | None,
+        add_profession_info: bytes | None,
+    ) -> None:
+        if naming_authority is not None and not isinstance(
+            naming_authority, NamingAuthority
+        ):
+            raise TypeError("naming_authority must be a NamingAuthority")
+
+        profession_items = list(profession_items)
+        if not all(isinstance(item, str) for item in profession_items):
+            raise TypeError(
+                "Every item in the profession_items list must be a str"
+            )
+
+        if profession_oids is not None:
+            profession_oids = list(profession_oids)
+            if not all(
+                isinstance(oid, ObjectIdentifier) for oid in profession_oids
+            ):
+                raise TypeError(
+                    "Every item in the profession_oids list must be an "
+                    "ObjectIdentifier"
+                )
+
+        if registration_number is not None and not isinstance(
+            registration_number, str
+        ):
+            raise TypeError("registration_number must be a str")
+
+        if add_profession_info is not None and not isinstance(
+            add_profession_info, bytes
+        ):
+            raise TypeError("add_profession_info must be bytes")
+
+        self._naming_authority = naming_authority
+        self._profession_items = profession_items
+        self._profession_oids = profession_oids
+        self._registration_number = registration_number
+        self._add_profession_info = add_profession_info
+
+    @property
+    def naming_authority(self) -> NamingAuthority | None:
+        return self._naming_authority
+
+    @property
+    def profession_items(self) -> list[str]:
+        return self._profession_items
+
+    @property
+    def profession_oids(self) -> list[ObjectIdentifier] | None:
+        return self._profession_oids
+
+    @property
+    def registration_number(self) -> str | None:
+        return self._registration_number
+
+    @property
+    def add_profession_info(self) -> bytes | None:
+        return self._add_profession_info
+
+    def __repr__(self) -> str:
+        return (
+            f"<ProfessionInfo(naming_authority={self.naming_authority}, "
+            f"profession_items={self.profession_items}, "
+            f"profession_oids={self.profession_oids}, "
+            f"registration_number={self.registration_number}, "
+            f"add_profession_info={self.add_profession_info!r})>"
+        )
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, ProfessionInfo):
+            return NotImplemented
+
+        return (
+            self.naming_authority == other.naming_authority
+            and self.profession_items == other.profession_items
+            and self.profession_oids == other.profession_oids
+            and self.registration_number == other.registration_number
+            and self.add_profession_info == other.add_profession_info
+        )
+
+    def __hash__(self) -> int:
+        if self.profession_oids is not None:
+            profession_oids = tuple(self.profession_oids)
+        else:
+            profession_oids = None
+        return hash(
+            (
+                self.naming_authority,
+                tuple(self.profession_items),
+                profession_oids,
+                self.registration_number,
+                self.add_profession_info,
+            )
+        )
+
+
+class Admission:
+    def __init__(
+        self,
+        admission_authority: GeneralName | None,
+        naming_authority: NamingAuthority | None,
+        profession_infos: Iterable[ProfessionInfo],
+    ) -> None:
+        if admission_authority is not None and not isinstance(
+            admission_authority, GeneralName
+        ):
+            raise TypeError("admission_authority must be a GeneralName")
+
+        if naming_authority is not None and not isinstance(
+            naming_authority, NamingAuthority
+        ):
+            raise TypeError("naming_authority must be a NamingAuthority")
+
+        profession_infos = list(profession_infos)
+        if not all(
+            isinstance(info, ProfessionInfo) for info in profession_infos
+        ):
+            raise TypeError(
+                "Every item in the profession_infos list must be a "
+                "ProfessionInfo"
+            )
+
+        self._admission_authority = admission_authority
+        self._naming_authority = naming_authority
+        self._profession_infos = profession_infos
+
+    @property
+    def admission_authority(self) -> GeneralName | None:
+        return self._admission_authority
+
+    @property
+    def naming_authority(self) -> NamingAuthority | None:
+        return self._naming_authority
+
+    @property
+    def profession_infos(self) -> list[ProfessionInfo]:
+        return self._profession_infos
+
+    def __repr__(self) -> str:
+        return (
+            f"<Admission(admission_authority={self.admission_authority}, "
+            f"naming_authority={self.naming_authority}, "
+            f"profession_infos={self.profession_infos})>"
+        )
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Admission):
+            return NotImplemented
+
+        return (
+            self.admission_authority == other.admission_authority
+            and self.naming_authority == other.naming_authority
+            and self.profession_infos == other.profession_infos
+        )
+
+    def __hash__(self) -> int:
+        return hash(
+            (
+                self.admission_authority,
+                self.naming_authority,
+                tuple(self.profession_infos),
+            )
+        )
+
+
+class Admissions(ExtensionType):
+    oid = ExtensionOID.ADMISSIONS
+
+    def __init__(
+        self,
+        authority: GeneralName | None,
+        admissions: Iterable[Admission],
+    ) -> None:
+        if authority is not None and not isinstance(authority, GeneralName):
+            raise TypeError("authority must be a GeneralName")
+
+        admissions = list(admissions)
+        if not all(
+            isinstance(admission, Admission) for admission in admissions
+        ):
+            raise TypeError(
+                "Every item in the contents_of_admissions list must be an "
+                "Admission"
+            )
+
+        self._authority = authority
+        self._admissions = admissions
+
+    __len__, __iter__, __getitem__ = _make_sequence_methods("_admissions")
+
+    @property
+    def authority(self) -> GeneralName | None:
+        return self._authority
+
+    def __repr__(self) -> str:
+        return (
+            f"<Admissions(authority={self._authority}, "
+            f"admissions={self._admissions})>"
+        )
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Admissions):
+            return NotImplemented
+
+        return (
+            self.authority == other.authority
+            and self._admissions == other._admissions
+        )
+
+    def __hash__(self) -> int:
+        return hash((self.authority, tuple(self._admissions)))
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class UnrecognizedExtension(ExtensionType):
+    def __init__(self, oid: ObjectIdentifier, value: bytes) -> None:
+        if not isinstance(oid, ObjectIdentifier):
+            raise TypeError("oid must be an ObjectIdentifier")
+        self._oid = oid
+        self._value = value
+
+    @property
+    def oid(self) -> ObjectIdentifier:  # type: ignore[override]
+        return self._oid
+
+    @property
+    def value(self) -> bytes:
+        return self._value
+
+    def __repr__(self) -> str:
+        return (
+            f"<UnrecognizedExtension(oid={self.oid}, "
+            f"value={self.value!r})>"
+        )
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, UnrecognizedExtension):
+            return NotImplemented
+
+        return self.oid == other.oid and self.value == other.value
+
+    def __hash__(self) -> int:
+        return hash((self.oid, self.value))
+
+    def public_bytes(self) -> bytes:
+        return self.value
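+
+# Usage sketch (illustrative, not upstream code): UnrecognizedExtension
+# carries a raw DER value for an OID the library has no class for, and
+# public_bytes() returns that value unchanged. The OID below is made up and
+# b"\x05\x00" is a DER-encoded NULL.
+#
+#     from cryptography import x509
+#
+#     raw = x509.UnrecognizedExtension(
+#         x509.ObjectIdentifier("1.3.6.1.4.1.99999.1"), b"\x05\x00"
+#     )
+#     raw.public_bytes()  # b"\x05\x00"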
diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/general_name.py b/venv/lib/python3.12/site-packages/cryptography/x509/general_name.py
new file mode 100644
index 00000000..672f2875
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/cryptography/x509/general_name.py
@@ -0,0 +1,281 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import ipaddress
+import typing
+from email.utils import parseaddr
+
+from cryptography.x509.name import Name
+from cryptography.x509.oid import ObjectIdentifier
+
+_IPAddressTypes = typing.Union[
+    ipaddress.IPv4Address,
+    ipaddress.IPv6Address,
+    ipaddress.IPv4Network,
+    ipaddress.IPv6Network,
+]
+
+
+class UnsupportedGeneralNameType(Exception):
+    pass
+
+
+class GeneralName(metaclass=abc.ABCMeta):
+    @property
+    @abc.abstractmethod
+    def value(self) -> typing.Any:
+        """
+        Return the value of the object
+        """
+
+
+class RFC822Name(GeneralName):
+    def __init__(self, value: str) -> None:
+        if isinstance(value, str):
+            try:
+                value.encode("ascii")
+            except UnicodeEncodeError:
+                raise ValueError(
+                    "RFC822Name values should be passed as an A-label string. "
+                    "This means unicode characters should be encoded via "
+                    "a library like idna."
+                )
+        else:
+            raise TypeError("value must be string")
+
+        name, address = parseaddr(value)
+        if name or not address:
+            # parseaddr has found a name (e.g. Name <email>) or the entire
+            # value is an empty string.
+            raise ValueError("Invalid rfc822name value")
+
+        self._value = value
+
+    @property
+    def value(self) -> str:
+        return self._value
+
+    @classmethod
+    def _init_without_validation(cls, value: str) -> RFC822Name:
+        instance = cls.__new__(cls)
+        instance._value = value
+        return instance
+
+    def __repr__(self) -> str:
+        return f"<RFC822Name(value={self.value!r})>"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, RFC822Name):
+            return NotImplemented
+
+        return self.value == other.value
+
+    def __hash__(self) -> int:
+        return hash(self.value)
+
+
+class DNSName(GeneralName):
+    def __init__(self, value: str) -> None:
+        if isinstance(value, str):
+            try:
+                value.encode("ascii")
+            except UnicodeEncodeError:
+                raise ValueError(
+                    "DNSName values should be passed as an A-label string. "
+                    "This means unicode characters should be encoded via "
+                    "a library like idna."
+                )
+        else:
+            raise TypeError("value must be string")
+
+        self._value = value
+
+    @property
+    def value(self) -> str:
+        return self._value
+
+    @classmethod
+    def _init_without_validation(cls, value: str) -> DNSName:
+        instance = cls.__new__(cls)
+        instance._value = value
+        return instance
+
+    def __repr__(self) -> str:
+        return f"<DNSName(value={self.value!r})>"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, DNSName):
+            return NotImplemented
+
+        return self.value == other.value
+
+    def __hash__(self) -> int:
+        return hash(self.value)
+
+
+class UniformResourceIdentifier(GeneralName):
+    def __init__(self, value: str) -> None:
+        if isinstance(value, str):
+            try:
+                value.encode("ascii")
+            except UnicodeEncodeError:
+                raise ValueError(
+                    "URI values should be passed as an A-label string. "
+                    "This means unicode characters should be encoded via "
+                    "a library like idna."
+                )
+        else:
+            raise TypeError("value must be string")
+
+        self._value = value
+
+    @property
+    def value(self) -> str:
+        return self._value
+
+    @classmethod
+    def _init_without_validation(cls, value: str) -> UniformResourceIdentifier:
+        instance = cls.__new__(cls)
+        instance._value = value
+        return instance
+
+    def __repr__(self) -> str:
+        return f"<UniformResourceIdentifier(value={self.value!r})>"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, UniformResourceIdentifier):
+            return NotImplemented
+
+        return self.value == other.value
+
+    def __hash__(self) -> int:
+        return hash(self.value)
+
+
+class DirectoryName(GeneralName):
+    def __init__(self, value: Name) -> None:
+        if not isinstance(value, Name):
+            raise TypeError("value must be a Name")
+
+        self._value = value
+
+    @property
+    def value(self) -> Name:
+        return self._value
+
+    def __repr__(self) -> str:
+        return f"<DirectoryName(value={self.value})>"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, DirectoryName):
+            return NotImplemented
+
+        return self.value == other.value
+
+    def __hash__(self) -> int:
+        return hash(self.value)
+
+
+class RegisteredID(GeneralName):
+    def __init__(self, value: ObjectIdentifier) -> None:
+        if not isinstance(value, ObjectIdentifier):
+            raise TypeError("value must be an ObjectIdentifier")
+
+        self._value = value
+
+    @property
+    def value(self) -> ObjectIdentifier:
+        return self._value
+
+    def __repr__(self) -> str:
+        return f"<RegisteredID(value={self.value})>"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, RegisteredID):
+            return NotImplemented
+
+        return self.value == other.value
+
+    def __hash__(self) -> int:
+        return hash(self.value)
+
+
+class IPAddress(GeneralName):
+    def __init__(self, value: _IPAddressTypes) -> None:
+        if not isinstance(
+            value,
+            (
+                ipaddress.IPv4Address,
+                ipaddress.IPv6Address,
+                ipaddress.IPv4Network,
+                ipaddress.IPv6Network,
+            ),
+        ):
+            raise TypeError(
+                "value must be an instance of ipaddress.IPv4Address, "
+                "ipaddress.IPv6Address, ipaddress.IPv4Network, or "
+                "ipaddress.IPv6Network"
+            )
+
+        self._value = value
+
+    @property
+    def value(self) -> _IPAddressTypes:
+        return self._value
+
+    def _packed(self) -> bytes:
+        if isinstance(
+            self.value, (ipaddress.IPv4Address, ipaddress.IPv6Address)
+        ):
+            return self.value.packed
+        else:
+            return (
+                self.value.network_address.packed + self.value.netmask.packed
+            )
+
+    def __repr__(self) -> str:
+        return f"<IPAddress(value={self.value})>"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, IPAddress):
+            return NotImplemented
+
+        return self.value == other.value
+
+    def __hash__(self) -> int:
+        return hash(self.value)
+
+
+class OtherName(GeneralName):
+    def __init__(self, type_id: ObjectIdentifier, value: bytes) -> None:
+        if not isinstance(type_id, ObjectIdentifier):
+            raise TypeError("type_id must be an ObjectIdentifier")
+        if not isinstance(value, bytes):
+            raise TypeError("value must be a binary string")
+
+        self._type_id = type_id
+        self._value = value
+
+    @property
+    def type_id(self) -> ObjectIdentifier:
+        return self._type_id
+
+    @property
+    def value(self) -> bytes:
+        return self._value
+
+    def __repr__(self) -> str:
+        return f"<OtherName(type_id={self.type_id}, value={self.value!r})>"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, OtherName):
+            return NotImplemented
+
+        return self.type_id == other.type_id and self.value == other.value
+
+    def __hash__(self) -> int:
+        return hash((self.type_id, self.value))
diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/name.py b/venv/lib/python3.12/site-packages/cryptography/x509/name.py
new file mode 100644
index 00000000..685f921d
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/cryptography/x509/name.py
@@ -0,0 +1,476 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import binascii
+import re
+import sys
+import typing
+import warnings
+from collections.abc import Iterable, Iterator
+
+from cryptography import utils
+from cryptography.hazmat.bindings._rust import x509 as rust_x509
+from cryptography.x509.oid import NameOID, ObjectIdentifier
+
+
+class _ASN1Type(utils.Enum):
+    BitString = 3
+    OctetString = 4
+    UTF8String = 12
+    NumericString = 18
+    PrintableString = 19
+    T61String = 20
+    IA5String = 22
+    UTCTime = 23
+    GeneralizedTime = 24
+    VisibleString = 26
+    UniversalString = 28
+    BMPString = 30
+
+
+_ASN1_TYPE_TO_ENUM = {i.value: i for i in _ASN1Type}
+_NAMEOID_DEFAULT_TYPE: dict[ObjectIdentifier, _ASN1Type] = {
+    NameOID.COUNTRY_NAME: _ASN1Type.PrintableString,
+    NameOID.JURISDICTION_COUNTRY_NAME: _ASN1Type.PrintableString,
+    NameOID.SERIAL_NUMBER: _ASN1Type.PrintableString,
+    NameOID.DN_QUALIFIER: _ASN1Type.PrintableString,
+    NameOID.EMAIL_ADDRESS: _ASN1Type.IA5String,
+    NameOID.DOMAIN_COMPONENT: _ASN1Type.IA5String,
+}
+
+# Type alias
+_OidNameMap = typing.Mapping[ObjectIdentifier, str]
+_NameOidMap = typing.Mapping[str, ObjectIdentifier]
+
+#: Short attribute names from RFC 4514:
+#: https://tools.ietf.org/html/rfc4514#page-7
+_NAMEOID_TO_NAME: _OidNameMap = {
+    NameOID.COMMON_NAME: "CN",
+    NameOID.LOCALITY_NAME: "L",
+    NameOID.STATE_OR_PROVINCE_NAME: "ST",
+    NameOID.ORGANIZATION_NAME: "O",
+    NameOID.ORGANIZATIONAL_UNIT_NAME: "OU",
+    NameOID.COUNTRY_NAME: "C",
+    NameOID.STREET_ADDRESS: "STREET",
+    NameOID.DOMAIN_COMPONENT: "DC",
+    NameOID.USER_ID: "UID",
+}
+_NAME_TO_NAMEOID = {v: k for k, v in _NAMEOID_TO_NAME.items()}
+
+_NAMEOID_LENGTH_LIMIT = {
+    NameOID.COUNTRY_NAME: (2, 2),
+    NameOID.JURISDICTION_COUNTRY_NAME: (2, 2),
+    NameOID.COMMON_NAME: (1, 64),
+}
+
+
+def _escape_dn_value(val: str | bytes) -> str:
+    """Escape special characters in RFC4514 Distinguished Name value."""
+
+    if not val:
+        return ""
+
+    # RFC 4514 Section 2.4 defines the value as being the # (U+0023) character
+    # followed by the hexadecimal encoding of the octets.
+    if isinstance(val, bytes):
+        return "#" + binascii.hexlify(val).decode("utf8")
+
+    # See https://tools.ietf.org/html/rfc4514#section-2.4
+    val = val.replace("\\", "\\\\")
+    val = val.replace('"', '\\"')
+    val = val.replace("+", "\\+")
+    val = val.replace(",", "\\,")
+    val = val.replace(";", "\\;")
+    val = val.replace("<", "\\<")
+    val = val.replace(">", "\\>")
+    val = val.replace("\0", "\\00")
+
+    if val[0] in ("#", " "):
+        val = "\\" + val
+    if val[-1] == " ":
+        val = val[:-1] + "\\ "
+
+    return val
+
+
+def _unescape_dn_value(val: str) -> str:
+    if not val:
+        return ""
+
+    # See https://tools.ietf.org/html/rfc4514#section-3
+
+    # special = escaped / SPACE / SHARP / EQUALS
+    # escaped = DQUOTE / PLUS / COMMA / SEMI / LANGLE / RANGLE
+    def sub(m):
+        val = m.group(1)
+        # Regular escape
+        if len(val) == 1:
+            return val
+        # Hex-value escape
+        return chr(int(val, 16))
+
+    return _RFC4514NameParser._PAIR_RE.sub(sub, val)
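+
+# A few worked examples of the escaping rules above (illustrative, not
+# upstream code); note that '#' and ' ' are only escaped at the ends:
+#
+#     _escape_dn_value("a,b")        == "a\\,b"
+#     _escape_dn_value(" padded ")   == "\\ padded\\ "
+#     _escape_dn_value(b"\x01\x02")  == "#0102"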
+
+
+NameAttributeValueType = typing.TypeVar(
+    "NameAttributeValueType",
+    typing.Union[str, bytes],
+    str,
+    bytes,
+    covariant=True,
+)
+
+
+class NameAttribute(typing.Generic[NameAttributeValueType]):
+    def __init__(
+        self,
+        oid: ObjectIdentifier,
+        value: NameAttributeValueType,
+        _type: _ASN1Type | None = None,
+        *,
+        _validate: bool = True,
+    ) -> None:
+        if not isinstance(oid, ObjectIdentifier):
+            raise TypeError(
+                "oid argument must be an ObjectIdentifier instance."
+            )
+        if _type == _ASN1Type.BitString:
+            if oid != NameOID.X500_UNIQUE_IDENTIFIER:
+                raise TypeError(
+                    "oid must be X500_UNIQUE_IDENTIFIER for BitString type."
+                )
+            if not isinstance(value, bytes):
+                raise TypeError("value must be bytes for BitString")
+        elif not isinstance(value, str):
+            raise TypeError("value argument must be a str")
+
+        length_limits = _NAMEOID_LENGTH_LIMIT.get(oid)
+        if length_limits is not None:
+            min_length, max_length = length_limits
+            assert isinstance(value, str)
+            c_len = len(value.encode("utf8"))
+            if c_len < min_length or c_len > max_length:
+                msg = (
+                    f"Attribute's length must be >= {min_length} and "
+                    f"<= {max_length}, but it was {c_len}"
+                )
+                if _validate is True:
+                    raise ValueError(msg)
+                else:
+                    warnings.warn(msg, stacklevel=2)
+
+        # The appropriate ASN1 string type varies by OID and is defined across
+        # multiple RFCs including 2459, 3280, and 5280. In general UTF8String
+        # is preferred (2459), but 3280 and 5280 specify several OIDs with
+        # alternate types. This means when we see the sentinel value we need
+        # to look up whether the OID has a non-UTF8 type. If it does, set it
+        # to that. Otherwise, UTF8!
+        if _type is None:
+            _type = _NAMEOID_DEFAULT_TYPE.get(oid, _ASN1Type.UTF8String)
+
+        if not isinstance(_type, _ASN1Type):
+            raise TypeError("_type must be from the _ASN1Type enum")
+
+        self._oid = oid
+        self._value: NameAttributeValueType = value
+        self._type: _ASN1Type = _type
+
+    @property
+    def oid(self) -> ObjectIdentifier:
+        return self._oid
+
+    @property
+    def value(self) -> NameAttributeValueType:
+        return self._value
+
+    @property
+    def rfc4514_attribute_name(self) -> str:
+        """
+        The short attribute name (for example "CN") if available,
+        otherwise the OID dotted string.
+        """
+        return _NAMEOID_TO_NAME.get(self.oid, self.oid.dotted_string)
+
+    def rfc4514_string(
+        self, attr_name_overrides: _OidNameMap | None = None
+    ) -> str:
+        """
+        Format as RFC4514 Distinguished Name string.
+
+        Use short attribute name if available, otherwise fall back to OID
+        dotted string.
+        """
+        attr_name = (
+            attr_name_overrides.get(self.oid) if attr_name_overrides else None
+        )
+        if attr_name is None:
+            attr_name = self.rfc4514_attribute_name
+
+        return f"{attr_name}={_escape_dn_value(self.value)}"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, NameAttribute):
+            return NotImplemented
+
+        return self.oid == other.oid and self.value == other.value
+
+    def __hash__(self) -> int:
+        return hash((self.oid, self.value))
+
+    def __repr__(self) -> str:
+        return f"<NameAttribute(oid={self.oid}, value={self.value!r})>"
+
+
+class RelativeDistinguishedName:
+    def __init__(self, attributes: Iterable[NameAttribute]):
+        attributes = list(attributes)
+        if not attributes:
+            raise ValueError("a relative distinguished name cannot be empty")
+        if not all(isinstance(x, NameAttribute) for x in attributes):
+            raise TypeError("attributes must be an iterable of NameAttribute")
+
+        # Keep list and frozenset to preserve attribute order where it matters
+        self._attributes = attributes
+        self._attribute_set = frozenset(attributes)
+
+        if len(self._attribute_set) != len(attributes):
+            raise ValueError("duplicate attributes are not allowed")
+
+    def get_attributes_for_oid(
+        self,
+        oid: ObjectIdentifier,
+    ) -> list[NameAttribute[str | bytes]]:
+        return [i for i in self if i.oid == oid]
+
+    def rfc4514_string(
+        self, attr_name_overrides: _OidNameMap | None = None
+    ) -> str:
+        """
+        Format as RFC4514 Distinguished Name string.
+
+        Within each RDN, attributes are joined by '+', although that is rarely
+        used in certificates.
+        """
+        return "+".join(
+            attr.rfc4514_string(attr_name_overrides)
+            for attr in self._attributes
+        )
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, RelativeDistinguishedName):
+            return NotImplemented
+
+        return self._attribute_set == other._attribute_set
+
+    def __hash__(self) -> int:
+        return hash(self._attribute_set)
+
+    def __iter__(self) -> Iterator[NameAttribute]:
+        return iter(self._attributes)
+
+    def __len__(self) -> int:
+        return len(self._attributes)
+
+    def __repr__(self) -> str:
+        return f"<RelativeDistinguishedName({self.rfc4514_string()})>"
+
+
+class Name:
+    @typing.overload
+    def __init__(self, attributes: Iterable[NameAttribute]) -> None: ...
+
+    @typing.overload
+    def __init__(
+        self, attributes: Iterable[RelativeDistinguishedName]
+    ) -> None: ...
+
+    def __init__(
+        self,
+        attributes: Iterable[NameAttribute | RelativeDistinguishedName],
+    ) -> None:
+        attributes = list(attributes)
+        if all(isinstance(x, NameAttribute) for x in attributes):
+            self._attributes = [
+                RelativeDistinguishedName([typing.cast(NameAttribute, x)])
+                for x in attributes
+            ]
+        elif all(isinstance(x, RelativeDistinguishedName) for x in attributes):
+            self._attributes = typing.cast(
+                typing.List[RelativeDistinguishedName], attributes
+            )
+        else:
+            raise TypeError(
+                "attributes must be a list of NameAttribute"
+                " or a list of RelativeDistinguishedName"
+            )
+
+    @classmethod
+    def from_rfc4514_string(
+        cls,
+        data: str,
+        attr_name_overrides: _NameOidMap | None = None,
+    ) -> Name:
+        return _RFC4514NameParser(data, attr_name_overrides or {}).parse()
+
+    def rfc4514_string(
+        self, attr_name_overrides: _OidNameMap | None = None
+    ) -> str:
+        """
+        Format as RFC4514 Distinguished Name string.
+        For example 'CN=foobar.com,O=Foo Corp,C=US'
+
+        An X.509 name is a two-level structure: a list of sets of attributes.
+        Each list element is separated by ',' and within each list element, set
+        elements are separated by '+'. The latter is almost never used in
+        real world certificates. According to RFC4514 section 2.1 the
+        RDNSequence must be reversed when converting to string representation.
+        """
+        return ",".join(
+            attr.rfc4514_string(attr_name_overrides)
+            for attr in reversed(self._attributes)
+        )
+
+    def get_attributes_for_oid(
+        self,
+        oid: ObjectIdentifier,
+    ) -> list[NameAttribute[str | bytes]]:
+        return [i for i in self if i.oid == oid]
+
+    @property
+    def rdns(self) -> list[RelativeDistinguishedName]:
+        return self._attributes
+
+    def public_bytes(self, backend: typing.Any = None) -> bytes:
+        return rust_x509.encode_name_bytes(self)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Name):
+            return NotImplemented
+
+        return self._attributes == other._attributes
+
+    def __hash__(self) -> int:
+        # TODO: this is relatively expensive, if this looks like a bottleneck
+        # for you, consider optimizing!
+        return hash(tuple(self._attributes))
+
+    def __iter__(self) -> Iterator[NameAttribute]:
+        for rdn in self._attributes:
+            yield from rdn
+
+    def __len__(self) -> int:
+        return sum(len(rdn) for rdn in self._attributes)
+
+    def __repr__(self) -> str:
+        rdns = ",".join(attr.rfc4514_string() for attr in self._attributes)
+        return f"<Name({rdns})>"
+
+
+class _RFC4514NameParser:
+    _OID_RE = re.compile(r"(0|([1-9]\d*))(\.(0|([1-9]\d*)))+")
+    _DESCR_RE = re.compile(r"[a-zA-Z][a-zA-Z\d-]*")
+
+    _PAIR = r"\\([\\ #=\"\+,;<>]|[\da-zA-Z]{2})"
+    _PAIR_RE = re.compile(_PAIR)
+    _LUTF1 = r"[\x01-\x1f\x21\x24-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]"
+    _SUTF1 = r"[\x01-\x21\x23-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]"
+    _TUTF1 = r"[\x01-\x1F\x21\x23-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]"
+    _UTFMB = rf"[\x80-{chr(sys.maxunicode)}]"
+    _LEADCHAR = rf"{_LUTF1}|{_UTFMB}"
+    _STRINGCHAR = rf"{_SUTF1}|{_UTFMB}"
+    _TRAILCHAR = rf"{_TUTF1}|{_UTFMB}"
+    _STRING_RE = re.compile(
+        rf"""
+        (
+            ({_LEADCHAR}|{_PAIR})
+            (
+                ({_STRINGCHAR}|{_PAIR})*
+                ({_TRAILCHAR}|{_PAIR})
+            )?
+        )?
+        """,
+        re.VERBOSE,
+    )
+    _HEXSTRING_RE = re.compile(r"#([\da-zA-Z]{2})+")
+
+    def __init__(self, data: str, attr_name_overrides: _NameOidMap) -> None:
+        self._data = data
+        self._idx = 0
+
+        self._attr_name_overrides = attr_name_overrides
+
+    def _has_data(self) -> bool:
+        return self._idx < len(self._data)
+
+    def _peek(self) -> str | None:
+        if self._has_data():
+            return self._data[self._idx]
+        return None
+
+    def _read_char(self, ch: str) -> None:
+        if self._peek() != ch:
+            raise ValueError
+        self._idx += 1
+
+    def _read_re(self, pat) -> str:
+        match = pat.match(self._data, pos=self._idx)
+        if match is None:
+            raise ValueError
+        val = match.group()
+        self._idx += len(val)
+        return val
+
+    def parse(self) -> Name:
+        """
+        Parses the `data` string and converts it to a Name.
+
+        According to RFC4514 section 2.1 the RDNSequence must be
+        reversed when converting to string representation. So, when
+        we parse it, we need to reverse again to get the RDNs in the
+        correct order.
+        """
+
+        if not self._has_data():
+            return Name([])
+
+        rdns = [self._parse_rdn()]
+
+        while self._has_data():
+            self._read_char(",")
+            rdns.append(self._parse_rdn())
+
+        return Name(reversed(rdns))
+
+    def _parse_rdn(self) -> RelativeDistinguishedName:
+        nas = [self._parse_na()]
+        while self._peek() == "+":
+            self._read_char("+")
+            nas.append(self._parse_na())
+
+        return RelativeDistinguishedName(nas)
+
+    def _parse_na(self) -> NameAttribute:
+        try:
+            oid_value = self._read_re(self._OID_RE)
+        except ValueError:
+            name = self._read_re(self._DESCR_RE)
+            oid = self._attr_name_overrides.get(
+                name, _NAME_TO_NAMEOID.get(name)
+            )
+            if oid is None:
+                raise ValueError
+        else:
+            oid = ObjectIdentifier(oid_value)
+
+        self._read_char("=")
+        if self._peek() == "#":
+            value = self._read_re(self._HEXSTRING_RE)
+            value = binascii.unhexlify(value[1:]).decode()
+        else:
+            raw_value = self._read_re(self._STRING_RE)
+            value = _unescape_dn_value(raw_value)
+
+        return NameAttribute(oid, value)
diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/ocsp.py b/venv/lib/python3.12/site-packages/cryptography/x509/ocsp.py
new file mode 100644
index 00000000..f61ed80b
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/cryptography/x509/ocsp.py
@@ -0,0 +1,379 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
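As a usage sketch of the name.py file added above (illustrative only, assuming this venv's cryptography package is importable; the attribute values are made up), the Name API round-trips RFC 4514 strings exactly as its docstrings describe:

from cryptography import x509
from cryptography.x509.oid import NameOID

# Build a Name as a list of single-attribute RDNs: C, then O, then CN.
name = x509.Name(
    [
        x509.NameAttribute(NameOID.COUNTRY_NAME, "US"),
        x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Foo Corp"),
        x509.NameAttribute(NameOID.COMMON_NAME, "foobar.com"),
    ]
)

# rfc4514_string() reverses the RDNSequence per RFC 4514 section 2.1,
# so the common name comes first in the string form.
assert name.rfc4514_string() == "CN=foobar.com,O=Foo Corp,C=US"

# from_rfc4514_string() reverses again, making the round trip lossless.
assert x509.Name.from_rfc4514_string(name.rfc4514_string()) == name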
+ +from __future__ import annotations + +import datetime +from collections.abc import Iterable + +from cryptography import utils, x509 +from cryptography.hazmat.bindings._rust import ocsp +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric.types import ( + CertificateIssuerPrivateKeyTypes, +) +from cryptography.x509.base import _reject_duplicate_extension + + +class OCSPResponderEncoding(utils.Enum): + HASH = "By Hash" + NAME = "By Name" + + +class OCSPResponseStatus(utils.Enum): + SUCCESSFUL = 0 + MALFORMED_REQUEST = 1 + INTERNAL_ERROR = 2 + TRY_LATER = 3 + SIG_REQUIRED = 5 + UNAUTHORIZED = 6 + + +_ALLOWED_HASHES = ( + hashes.SHA1, + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, +) + + +def _verify_algorithm(algorithm: hashes.HashAlgorithm) -> None: + if not isinstance(algorithm, _ALLOWED_HASHES): + raise ValueError( + "Algorithm must be SHA1, SHA224, SHA256, SHA384, or SHA512" + ) + + +class OCSPCertStatus(utils.Enum): + GOOD = 0 + REVOKED = 1 + UNKNOWN = 2 + + +class _SingleResponse: + def __init__( + self, + resp: tuple[x509.Certificate, x509.Certificate] | None, + resp_hash: tuple[bytes, bytes, int] | None, + algorithm: hashes.HashAlgorithm, + cert_status: OCSPCertStatus, + this_update: datetime.datetime, + next_update: datetime.datetime | None, + revocation_time: datetime.datetime | None, + revocation_reason: x509.ReasonFlags | None, + ): + _verify_algorithm(algorithm) + if not isinstance(this_update, datetime.datetime): + raise TypeError("this_update must be a datetime object") + if next_update is not None and not isinstance( + next_update, datetime.datetime + ): + raise TypeError("next_update must be a datetime object or None") + + self._resp = resp + self._resp_hash = resp_hash + self._algorithm = algorithm + self._this_update = this_update + self._next_update = next_update + + if not isinstance(cert_status, OCSPCertStatus): + raise TypeError( + "cert_status must be an item from the OCSPCertStatus enum" + ) + if cert_status is not OCSPCertStatus.REVOKED: + if revocation_time is not None: + raise ValueError( + "revocation_time can only be provided if the certificate " + "is revoked" + ) + if revocation_reason is not None: + raise ValueError( + "revocation_reason can only be provided if the certificate" + " is revoked" + ) + else: + if not isinstance(revocation_time, datetime.datetime): + raise TypeError("revocation_time must be a datetime object") + + if revocation_reason is not None and not isinstance( + revocation_reason, x509.ReasonFlags + ): + raise TypeError( + "revocation_reason must be an item from the ReasonFlags " + "enum or None" + ) + + self._cert_status = cert_status + self._revocation_time = revocation_time + self._revocation_reason = revocation_reason + + +OCSPRequest = ocsp.OCSPRequest +OCSPResponse = ocsp.OCSPResponse +OCSPSingleResponse = ocsp.OCSPSingleResponse + + +class OCSPRequestBuilder: + def __init__( + self, + request: tuple[ + x509.Certificate, x509.Certificate, hashes.HashAlgorithm + ] + | None = None, + request_hash: tuple[bytes, bytes, int, hashes.HashAlgorithm] + | None = None, + extensions: list[x509.Extension[x509.ExtensionType]] = [], + ) -> None: + self._request = request + self._request_hash = request_hash + self._extensions = extensions + + def add_certificate( + self, + cert: x509.Certificate, + issuer: x509.Certificate, + algorithm: hashes.HashAlgorithm, + ) -> OCSPRequestBuilder: + if self._request is not None or self._request_hash is not None: + raise ValueError("Only one 
certificate can be added to a request") + + _verify_algorithm(algorithm) + if not isinstance(cert, x509.Certificate) or not isinstance( + issuer, x509.Certificate + ): + raise TypeError("cert and issuer must be a Certificate") + + return OCSPRequestBuilder( + (cert, issuer, algorithm), self._request_hash, self._extensions + ) + + def add_certificate_by_hash( + self, + issuer_name_hash: bytes, + issuer_key_hash: bytes, + serial_number: int, + algorithm: hashes.HashAlgorithm, + ) -> OCSPRequestBuilder: + if self._request is not None or self._request_hash is not None: + raise ValueError("Only one certificate can be added to a request") + + if not isinstance(serial_number, int): + raise TypeError("serial_number must be an integer") + + _verify_algorithm(algorithm) + utils._check_bytes("issuer_name_hash", issuer_name_hash) + utils._check_bytes("issuer_key_hash", issuer_key_hash) + if algorithm.digest_size != len( + issuer_name_hash + ) or algorithm.digest_size != len(issuer_key_hash): + raise ValueError( + "issuer_name_hash and issuer_key_hash must be the same length " + "as the digest size of the algorithm" + ) + + return OCSPRequestBuilder( + self._request, + (issuer_name_hash, issuer_key_hash, serial_number, algorithm), + self._extensions, + ) + + def add_extension( + self, extval: x509.ExtensionType, critical: bool + ) -> OCSPRequestBuilder: + if not isinstance(extval, x509.ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = x509.Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + + return OCSPRequestBuilder( + self._request, self._request_hash, [*self._extensions, extension] + ) + + def build(self) -> OCSPRequest: + if self._request is None and self._request_hash is None: + raise ValueError("You must add a certificate before building") + + return ocsp.create_ocsp_request(self) + + +class OCSPResponseBuilder: + def __init__( + self, + response: _SingleResponse | None = None, + responder_id: tuple[x509.Certificate, OCSPResponderEncoding] + | None = None, + certs: list[x509.Certificate] | None = None, + extensions: list[x509.Extension[x509.ExtensionType]] = [], + ): + self._response = response + self._responder_id = responder_id + self._certs = certs + self._extensions = extensions + + def add_response( + self, + cert: x509.Certificate, + issuer: x509.Certificate, + algorithm: hashes.HashAlgorithm, + cert_status: OCSPCertStatus, + this_update: datetime.datetime, + next_update: datetime.datetime | None, + revocation_time: datetime.datetime | None, + revocation_reason: x509.ReasonFlags | None, + ) -> OCSPResponseBuilder: + if self._response is not None: + raise ValueError("Only one response per OCSPResponse.") + + if not isinstance(cert, x509.Certificate) or not isinstance( + issuer, x509.Certificate + ): + raise TypeError("cert and issuer must be a Certificate") + + singleresp = _SingleResponse( + (cert, issuer), + None, + algorithm, + cert_status, + this_update, + next_update, + revocation_time, + revocation_reason, + ) + return OCSPResponseBuilder( + singleresp, + self._responder_id, + self._certs, + self._extensions, + ) + + def add_response_by_hash( + self, + issuer_name_hash: bytes, + issuer_key_hash: bytes, + serial_number: int, + algorithm: hashes.HashAlgorithm, + cert_status: OCSPCertStatus, + this_update: datetime.datetime, + next_update: datetime.datetime | None, + revocation_time: datetime.datetime | None, + revocation_reason: x509.ReasonFlags | None, + ) -> OCSPResponseBuilder: + if self._response 
is not None: + raise ValueError("Only one response per OCSPResponse.") + + if not isinstance(serial_number, int): + raise TypeError("serial_number must be an integer") + + utils._check_bytes("issuer_name_hash", issuer_name_hash) + utils._check_bytes("issuer_key_hash", issuer_key_hash) + _verify_algorithm(algorithm) + if algorithm.digest_size != len( + issuer_name_hash + ) or algorithm.digest_size != len(issuer_key_hash): + raise ValueError( + "issuer_name_hash and issuer_key_hash must be the same length " + "as the digest size of the algorithm" + ) + + singleresp = _SingleResponse( + None, + (issuer_name_hash, issuer_key_hash, serial_number), + algorithm, + cert_status, + this_update, + next_update, + revocation_time, + revocation_reason, + ) + return OCSPResponseBuilder( + singleresp, + self._responder_id, + self._certs, + self._extensions, + ) + + def responder_id( + self, encoding: OCSPResponderEncoding, responder_cert: x509.Certificate + ) -> OCSPResponseBuilder: + if self._responder_id is not None: + raise ValueError("responder_id can only be set once") + if not isinstance(responder_cert, x509.Certificate): + raise TypeError("responder_cert must be a Certificate") + if not isinstance(encoding, OCSPResponderEncoding): + raise TypeError( + "encoding must be an element from OCSPResponderEncoding" + ) + + return OCSPResponseBuilder( + self._response, + (responder_cert, encoding), + self._certs, + self._extensions, + ) + + def certificates( + self, certs: Iterable[x509.Certificate] + ) -> OCSPResponseBuilder: + if self._certs is not None: + raise ValueError("certificates may only be set once") + certs = list(certs) + if len(certs) == 0: + raise ValueError("certs must not be an empty list") + if not all(isinstance(x, x509.Certificate) for x in certs): + raise TypeError("certs must be a list of Certificates") + return OCSPResponseBuilder( + self._response, + self._responder_id, + certs, + self._extensions, + ) + + def add_extension( + self, extval: x509.ExtensionType, critical: bool + ) -> OCSPResponseBuilder: + if not isinstance(extval, x509.ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = x509.Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + + return OCSPResponseBuilder( + self._response, + self._responder_id, + self._certs, + [*self._extensions, extension], + ) + + def sign( + self, + private_key: CertificateIssuerPrivateKeyTypes, + algorithm: hashes.HashAlgorithm | None, + ) -> OCSPResponse: + if self._response is None: + raise ValueError("You must add a response before signing") + if self._responder_id is None: + raise ValueError("You must add a responder_id before signing") + + return ocsp.create_ocsp_response( + OCSPResponseStatus.SUCCESSFUL, self, private_key, algorithm + ) + + @classmethod + def build_unsuccessful( + cls, response_status: OCSPResponseStatus + ) -> OCSPResponse: + if not isinstance(response_status, OCSPResponseStatus): + raise TypeError( + "response_status must be an item from OCSPResponseStatus" + ) + if response_status is OCSPResponseStatus.SUCCESSFUL: + raise ValueError("response_status cannot be SUCCESSFUL") + + return ocsp.create_ocsp_response(response_status, None, None, None) + + +load_der_ocsp_request = ocsp.load_der_ocsp_request +load_der_ocsp_response = ocsp.load_der_ocsp_response diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/oid.py b/venv/lib/python3.12/site-packages/cryptography/x509/oid.py new file mode 100644 index 00000000..520fc7ab --- 
/dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/x509/oid.py @@ -0,0 +1,37 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat._oid import ( + AttributeOID, + AuthorityInformationAccessOID, + CertificatePoliciesOID, + CRLEntryExtensionOID, + ExtendedKeyUsageOID, + ExtensionOID, + NameOID, + ObjectIdentifier, + OCSPExtensionOID, + OtherNameFormOID, + PublicKeyAlgorithmOID, + SignatureAlgorithmOID, + SubjectInformationAccessOID, +) + +__all__ = [ + "AttributeOID", + "AuthorityInformationAccessOID", + "CRLEntryExtensionOID", + "CertificatePoliciesOID", + "ExtendedKeyUsageOID", + "ExtensionOID", + "NameOID", + "OCSPExtensionOID", + "ObjectIdentifier", + "OtherNameFormOID", + "PublicKeyAlgorithmOID", + "SignatureAlgorithmOID", + "SubjectInformationAccessOID", +] diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/verification.py b/venv/lib/python3.12/site-packages/cryptography/x509/verification.py new file mode 100644 index 00000000..2db4324d --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/x509/verification.py @@ -0,0 +1,34 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import typing + +from cryptography.hazmat.bindings._rust import x509 as rust_x509 +from cryptography.x509.general_name import DNSName, IPAddress + +__all__ = [ + "ClientVerifier", + "Criticality", + "ExtensionPolicy", + "Policy", + "PolicyBuilder", + "ServerVerifier", + "Store", + "Subject", + "VerificationError", + "VerifiedClient", +] + +Store = rust_x509.Store +Subject = typing.Union[DNSName, IPAddress] +VerifiedClient = rust_x509.VerifiedClient +ClientVerifier = rust_x509.ClientVerifier +ServerVerifier = rust_x509.ServerVerifier +PolicyBuilder = rust_x509.PolicyBuilder +Policy = rust_x509.Policy +ExtensionPolicy = rust_x509.ExtensionPolicy +Criticality = rust_x509.Criticality +VerificationError = rust_x509.VerificationError diff --git a/venv/lib/python3.12/site-packages/flask/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/__pycache__/__init__.cpython-312.pyc index a84691ca..e1630cc0 100644 Binary files a/venv/lib/python3.12/site-packages/flask/__pycache__/__init__.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/__pycache__/__main__.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/__pycache__/__main__.cpython-312.pyc index f2330c97..bd633109 100644 Binary files a/venv/lib/python3.12/site-packages/flask/__pycache__/__main__.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask/__pycache__/__main__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/__pycache__/app.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/__pycache__/app.cpython-312.pyc index d4d4a87f..ddf46f34 100644 Binary files a/venv/lib/python3.12/site-packages/flask/__pycache__/app.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask/__pycache__/app.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/__pycache__/blueprints.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/__pycache__/blueprints.cpython-312.pyc index 
f743d445..9d22fdf3 100644 Binary files a/venv/lib/python3.12/site-packages/flask/__pycache__/blueprints.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask/__pycache__/blueprints.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/__pycache__/cli.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/__pycache__/cli.cpython-312.pyc index b86ac4ad..ebc292cf 100644 Binary files a/venv/lib/python3.12/site-packages/flask/__pycache__/cli.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask/__pycache__/cli.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/__pycache__/config.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/__pycache__/config.cpython-312.pyc index 9433e111..6d4a4da7 100644 Binary files a/venv/lib/python3.12/site-packages/flask/__pycache__/config.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask/__pycache__/config.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/__pycache__/ctx.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/__pycache__/ctx.cpython-312.pyc index 55024a1a..a14c0e79 100644 Binary files a/venv/lib/python3.12/site-packages/flask/__pycache__/ctx.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask/__pycache__/ctx.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/__pycache__/debughelpers.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/__pycache__/debughelpers.cpython-312.pyc index 2ad08e20..16dc4fdc 100644 Binary files a/venv/lib/python3.12/site-packages/flask/__pycache__/debughelpers.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask/__pycache__/debughelpers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/__pycache__/globals.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/__pycache__/globals.cpython-312.pyc index 61b271d2..c0bc0964 100644 Binary files a/venv/lib/python3.12/site-packages/flask/__pycache__/globals.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask/__pycache__/globals.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/__pycache__/helpers.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/__pycache__/helpers.cpython-312.pyc index f282e9fc..b568b50e 100644 Binary files a/venv/lib/python3.12/site-packages/flask/__pycache__/helpers.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask/__pycache__/helpers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/__pycache__/logging.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/__pycache__/logging.cpython-312.pyc index 3b9473a2..caf0358f 100644 Binary files a/venv/lib/python3.12/site-packages/flask/__pycache__/logging.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask/__pycache__/logging.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/__pycache__/sessions.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/__pycache__/sessions.cpython-312.pyc index 10f2ab04..f4705064 100644 Binary files a/venv/lib/python3.12/site-packages/flask/__pycache__/sessions.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask/__pycache__/sessions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/__pycache__/signals.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/__pycache__/signals.cpython-312.pyc index 05281619..cd4dbfd1 100644 Binary files a/venv/lib/python3.12/site-packages/flask/__pycache__/signals.cpython-312.pyc and 
b/venv/lib/python3.12/site-packages/flask/__pycache__/signals.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/__pycache__/templating.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/__pycache__/templating.cpython-312.pyc index 1c7a3324..38fafb26 100644 Binary files a/venv/lib/python3.12/site-packages/flask/__pycache__/templating.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask/__pycache__/templating.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/__pycache__/testing.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/__pycache__/testing.cpython-312.pyc index 9f37deeb..bb5b0c4f 100644 Binary files a/venv/lib/python3.12/site-packages/flask/__pycache__/testing.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask/__pycache__/testing.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/__pycache__/typing.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/__pycache__/typing.cpython-312.pyc index c2ae9103..1819768f 100644 Binary files a/venv/lib/python3.12/site-packages/flask/__pycache__/typing.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask/__pycache__/typing.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/__pycache__/views.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/__pycache__/views.cpython-312.pyc index 60a92840..b9bd9fd6 100644 Binary files a/venv/lib/python3.12/site-packages/flask/__pycache__/views.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask/__pycache__/views.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/__pycache__/wrappers.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/__pycache__/wrappers.cpython-312.pyc index ec79fe44..1ed70f55 100644 Binary files a/venv/lib/python3.12/site-packages/flask/__pycache__/wrappers.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask/__pycache__/wrappers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/json/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/json/__pycache__/__init__.cpython-312.pyc index 8ff1891e..858d5c59 100644 Binary files a/venv/lib/python3.12/site-packages/flask/json/__pycache__/__init__.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask/json/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/json/__pycache__/provider.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/json/__pycache__/provider.cpython-312.pyc index 846321fe..97b4f3dc 100644 Binary files a/venv/lib/python3.12/site-packages/flask/json/__pycache__/provider.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask/json/__pycache__/provider.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/json/__pycache__/tag.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/json/__pycache__/tag.cpython-312.pyc index 0d570dd1..bba6ac00 100644 Binary files a/venv/lib/python3.12/site-packages/flask/json/__pycache__/tag.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask/json/__pycache__/tag.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/sansio/__pycache__/app.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/sansio/__pycache__/app.cpython-312.pyc index 63115437..eb0c020b 100644 Binary files a/venv/lib/python3.12/site-packages/flask/sansio/__pycache__/app.cpython-312.pyc and 
b/venv/lib/python3.12/site-packages/flask/sansio/__pycache__/app.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/sansio/__pycache__/blueprints.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/sansio/__pycache__/blueprints.cpython-312.pyc index bc061594..3d36d6d8 100644 Binary files a/venv/lib/python3.12/site-packages/flask/sansio/__pycache__/blueprints.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask/sansio/__pycache__/blueprints.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask/sansio/__pycache__/scaffold.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask/sansio/__pycache__/scaffold.cpython-312.pyc index e7f35054..59c20b8a 100644 Binary files a/venv/lib/python3.12/site-packages/flask/sansio/__pycache__/scaffold.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask/sansio/__pycache__/scaffold.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask_cors/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask_cors/__pycache__/__init__.cpython-312.pyc index ee446b75..b79bcaf3 100644 Binary files a/venv/lib/python3.12/site-packages/flask_cors/__pycache__/__init__.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask_cors/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask_cors/__pycache__/core.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask_cors/__pycache__/core.cpython-312.pyc index c760b3c8..ef02c206 100644 Binary files a/venv/lib/python3.12/site-packages/flask_cors/__pycache__/core.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask_cors/__pycache__/core.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask_cors/__pycache__/decorator.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask_cors/__pycache__/decorator.cpython-312.pyc index bb845b39..260f75ac 100644 Binary files a/venv/lib/python3.12/site-packages/flask_cors/__pycache__/decorator.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask_cors/__pycache__/decorator.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask_cors/__pycache__/extension.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask_cors/__pycache__/extension.cpython-312.pyc index 913c4e08..a5cf0660 100644 Binary files a/venv/lib/python3.12/site-packages/flask_cors/__pycache__/extension.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask_cors/__pycache__/extension.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/flask_cors/__pycache__/version.cpython-312.pyc b/venv/lib/python3.12/site-packages/flask_cors/__pycache__/version.cpython-312.pyc index 9bcb55ea..75d9940a 100644 Binary files a/venv/lib/python3.12/site-packages/flask_cors/__pycache__/version.cpython-312.pyc and b/venv/lib/python3.12/site-packages/flask_cors/__pycache__/version.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/__init__.cpython-312.pyc index 8c5381f5..af51c58b 100644 Binary files a/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/__init__.cpython-312.pyc and b/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/_json.cpython-312.pyc b/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/_json.cpython-312.pyc index 3de625de..b674cc18 100644 Binary files 
a/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/_json.cpython-312.pyc and b/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/_json.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/encoding.cpython-312.pyc b/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/encoding.cpython-312.pyc index 870a5b96..6184e35f 100644 Binary files a/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/encoding.cpython-312.pyc and b/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/encoding.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/exc.cpython-312.pyc b/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/exc.cpython-312.pyc index 663049f2..69a9ff17 100644 Binary files a/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/exc.cpython-312.pyc and b/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/exc.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/serializer.cpython-312.pyc b/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/serializer.cpython-312.pyc index b9b9e125..628b2c83 100644 Binary files a/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/serializer.cpython-312.pyc and b/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/serializer.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/signer.cpython-312.pyc b/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/signer.cpython-312.pyc index a74d8019..7236651b 100644 Binary files a/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/signer.cpython-312.pyc and b/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/signer.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/timed.cpython-312.pyc b/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/timed.cpython-312.pyc index 7bf2151b..312540a3 100644 Binary files a/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/timed.cpython-312.pyc and b/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/timed.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/url_safe.cpython-312.pyc b/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/url_safe.cpython-312.pyc index ea891da7..9e568235 100644 Binary files a/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/url_safe.cpython-312.pyc and b/venv/lib/python3.12/site-packages/itsdangerous/__pycache__/url_safe.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/__init__.cpython-312.pyc index 34b552ac..ba7da57d 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/__init__.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/_identifier.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/_identifier.cpython-312.pyc index c50b764c..5df511ca 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/_identifier.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/_identifier.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/async_utils.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/jinja2/__pycache__/async_utils.cpython-312.pyc index a7da7b6d..1486c1d1 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/async_utils.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/async_utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/bccache.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/bccache.cpython-312.pyc index e80e7fc4..913bd12a 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/bccache.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/bccache.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/compiler.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/compiler.cpython-312.pyc index 02ede9aa..6fbcb27a 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/compiler.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/compiler.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/constants.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/constants.cpython-312.pyc index 9a280177..b34f64e2 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/constants.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/constants.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/debug.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/debug.cpython-312.pyc index 657b1e98..caf4b077 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/debug.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/debug.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/defaults.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/defaults.cpython-312.pyc index c8e52e15..a9e17fbd 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/defaults.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/defaults.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/environment.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/environment.cpython-312.pyc index 1b874cdb..77d2cb0d 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/environment.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/environment.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/exceptions.cpython-312.pyc index 927aba1b..33053fbe 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/exceptions.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/ext.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/ext.cpython-312.pyc index cf8beb06..011a1c01 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/ext.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/ext.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/filters.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/jinja2/__pycache__/filters.cpython-312.pyc index d81ad397..b6efe30d 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/filters.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/filters.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/idtracking.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/idtracking.cpython-312.pyc index 2a9640bf..34867eaa 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/idtracking.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/idtracking.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/lexer.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/lexer.cpython-312.pyc index b092ee0d..1cc2c05a 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/lexer.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/lexer.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/loaders.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/loaders.cpython-312.pyc index e992197a..d7346ec1 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/loaders.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/loaders.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/meta.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/meta.cpython-312.pyc index 528ee510..4113ff12 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/meta.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/meta.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/nativetypes.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/nativetypes.cpython-312.pyc index cc2a9ad8..a4365db6 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/nativetypes.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/nativetypes.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/nodes.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/nodes.cpython-312.pyc index 84048bd7..a289ddaf 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/nodes.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/nodes.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/optimizer.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/optimizer.cpython-312.pyc index 251a9396..e9f397d0 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/optimizer.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/optimizer.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/parser.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/parser.cpython-312.pyc index ef444a15..df7851c6 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/parser.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/parser.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/runtime.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/runtime.cpython-312.pyc index 227c13f5..8d451051 
100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/runtime.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/runtime.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/sandbox.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/sandbox.cpython-312.pyc index 16db873c..c46ef869 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/sandbox.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/sandbox.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/tests.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/tests.cpython-312.pyc index 624c820d..15652a3b 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/tests.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/tests.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/utils.cpython-312.pyc index 20d45e5f..d86821e4 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/utils.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jinja2/__pycache__/visitor.cpython-312.pyc b/venv/lib/python3.12/site-packages/jinja2/__pycache__/visitor.cpython-312.pyc index 8a74b77f..ba8e141d 100644 Binary files a/venv/lib/python3.12/site-packages/jinja2/__pycache__/visitor.cpython-312.pyc and b/venv/lib/python3.12/site-packages/jinja2/__pycache__/visitor.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/markupsafe/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/markupsafe/__pycache__/__init__.cpython-312.pyc index 72ebaa6f..d9606d28 100644 Binary files a/venv/lib/python3.12/site-packages/markupsafe/__pycache__/__init__.cpython-312.pyc and b/venv/lib/python3.12/site-packages/markupsafe/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/markupsafe/__pycache__/_native.cpython-312.pyc b/venv/lib/python3.12/site-packages/markupsafe/__pycache__/_native.cpython-312.pyc index f93b2ae8..396730de 100644 Binary files a/venv/lib/python3.12/site-packages/markupsafe/__pycache__/_native.cpython-312.pyc and b/venv/lib/python3.12/site-packages/markupsafe/__pycache__/_native.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pycparser-2.23.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/pycparser-2.23.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pycparser-2.23.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/pycparser-2.23.dist-info/LICENSE b/venv/lib/python3.12/site-packages/pycparser-2.23.dist-info/LICENSE new file mode 100644 index 00000000..bee14a47 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pycparser-2.23.dist-info/LICENSE @@ -0,0 +1,27 @@ +pycparser -- A C parser in Python + +Copyright (c) 2008-2022, Eli Bendersky +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. 
+* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +* Neither the name of the copyright holder nor the names of its contributors may + be used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE +GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT +OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.12/site-packages/pycparser-2.23.dist-info/METADATA b/venv/lib/python3.12/site-packages/pycparser-2.23.dist-info/METADATA new file mode 100644 index 00000000..318b034f --- /dev/null +++ b/venv/lib/python3.12/site-packages/pycparser-2.23.dist-info/METADATA @@ -0,0 +1,28 @@ +Metadata-Version: 2.1 +Name: pycparser +Version: 2.23 +Summary: C parser in Python +Home-page: https://github.com/eliben/pycparser +Author: Eli Bendersky +Author-email: eliben@gmail.com +Maintainer: Eli Bendersky +License: BSD-3-Clause +Platform: Cross Platform +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Requires-Python: >=3.8 +License-File: LICENSE + + + pycparser is a complete parser of the C language, written in + pure Python using the PLY parsing library. + It parses C code into an AST and can serve as a front-end for + C compilers or analysis tools. 
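As a usage sketch of the package summary above (illustrative only, assuming the vendored pycparser is importable; the function name add is made up), a preprocessor-free C snippet can be parsed straight from a string, with no cpp invocation:

from pycparser import c_ast, c_parser

# Parse a self-contained C snippet directly into an AST.
parser = c_parser.CParser()
ast = parser.parse("int add(int x, int y) { return x + y; }")

# Walk the AST and report each function definition's name.
class FuncDefVisitor(c_ast.NodeVisitor):
    def visit_FuncDef(self, node):
        print(node.decl.name)  # prints: add

FuncDefVisitor().visit(ast)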
+ diff --git a/venv/lib/python3.12/site-packages/pycparser-2.23.dist-info/RECORD b/venv/lib/python3.12/site-packages/pycparser-2.23.dist-info/RECORD new file mode 100644 index 00000000..55a5bb2e --- /dev/null +++ b/venv/lib/python3.12/site-packages/pycparser-2.23.dist-info/RECORD @@ -0,0 +1,41 @@ +pycparser-2.23.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pycparser-2.23.dist-info/LICENSE,sha256=DIRjmTaep23de1xE_m0WSXQV_PAV9cu1CMJL-YuBxbE,1543 +pycparser-2.23.dist-info/METADATA,sha256=osmhHMxa3n5sPwv5WeUpyyPnm76eohXYGZyKGmWbPFc,993 +pycparser-2.23.dist-info/RECORD,, +pycparser-2.23.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92 +pycparser-2.23.dist-info/top_level.txt,sha256=c-lPcS74L_8KoH7IE6PQF5ofyirRQNV4VhkbSFIPeWM,10 +pycparser/__init__.py,sha256=FQFl5XuxXZiYHrBuN1EElN1COlR8k4aCSdG7h7a7zLw,2918 +pycparser/__pycache__/__init__.cpython-312.pyc,, +pycparser/__pycache__/_ast_gen.cpython-312.pyc,, +pycparser/__pycache__/_build_tables.cpython-312.pyc,, +pycparser/__pycache__/ast_transforms.cpython-312.pyc,, +pycparser/__pycache__/c_ast.cpython-312.pyc,, +pycparser/__pycache__/c_generator.cpython-312.pyc,, +pycparser/__pycache__/c_lexer.cpython-312.pyc,, +pycparser/__pycache__/c_parser.cpython-312.pyc,, +pycparser/__pycache__/lextab.cpython-312.pyc,, +pycparser/__pycache__/plyparser.cpython-312.pyc,, +pycparser/__pycache__/yacctab.cpython-312.pyc,, +pycparser/_ast_gen.py,sha256=0JRVnDW-Jw-3IjVlo8je9rbAcp6Ko7toHAnB5zi7h0Q,10555 +pycparser/_build_tables.py,sha256=4d_UkIxJ4YfHTVn6xBzBA52wDo7qxg1B6aZAJYJas9Q,1087 +pycparser/_c_ast.cfg,sha256=ld5ezE9yzIJFIVAUfw7ezJSlMi4nXKNCzfmqjOyQTNo,4255 +pycparser/ast_transforms.py,sha256=GTMYlUgWmXd5wJVyovXY1qzzAqjxzCpVVg0664dKGBs,5691 +pycparser/c_ast.py,sha256=HWeOrfYdCY0u5XaYhE1i60uVyE3yMWdcxzECUX-DqJw,31445 +pycparser/c_generator.py,sha256=XWK3oGM_eVD5d_JfgJxFO95Y6vUMfRi8pov5FQjHH2s,17790 +pycparser/c_lexer.py,sha256=bq7LALBDUw452KT3J7QzHj2qMoCugGWjITNPWD7yiOE,17728 +pycparser/c_parser.py,sha256=ujQZ7y6Qded9h5SDrhtgSlHw0vSYVa_yiMtW9ZnsezY,75462 +pycparser/lextab.py,sha256=eLh3spnPArpAKCVAEK1JCOgUX0L9wrJdfOTLyw8sLRE,8776 +pycparser/ply/__init__.py,sha256=q4s86QwRsYRa20L9ueSxfh-hPihpftBjDOvYa2_SS2Y,102 +pycparser/ply/__pycache__/__init__.cpython-312.pyc,, +pycparser/ply/__pycache__/cpp.cpython-312.pyc,, +pycparser/ply/__pycache__/ctokens.cpython-312.pyc,, +pycparser/ply/__pycache__/lex.cpython-312.pyc,, +pycparser/ply/__pycache__/yacc.cpython-312.pyc,, +pycparser/ply/__pycache__/ygen.cpython-312.pyc,, +pycparser/ply/cpp.py,sha256=UtC3ylTWp5_1MKA-PLCuwKQR8zSOnlGuGGIdzj8xS98,33282 +pycparser/ply/ctokens.py,sha256=MKksnN40TehPhgVfxCJhjj_BjL943apreABKYz-bl0Y,3177 +pycparser/ply/lex.py,sha256=rCMi0yjlZmjH5SNXj_Yds1VxSDkaG2thS7351YvfN-I,42926 +pycparser/ply/yacc.py,sha256=eatSDkRLgRr6X3-hoDk_SQQv065R0BdL2K7fQ54CgVM,137323 +pycparser/ply/ygen.py,sha256=2JYNeYtrPz1JzLSLO3d4GsS8zJU8jY_I_CR1VI9gWrA,2251 +pycparser/plyparser.py,sha256=8tLOoEytcapvWrr1JfCf7Dog-wulBtS1YrDs8S7JfMo,4875 +pycparser/yacctab.py,sha256=ovX4pQW7sjbRf8c-GxpU6J65pqVGAgbU_G_YuW5NPHM,213007 diff --git a/venv/lib/python3.12/site-packages/pycparser-2.23.dist-info/WHEEL b/venv/lib/python3.12/site-packages/pycparser-2.23.dist-info/WHEEL new file mode 100644 index 00000000..98c0d20b --- /dev/null +++ b/venv/lib/python3.12/site-packages/pycparser-2.23.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git 
a/venv/lib/python3.12/site-packages/pycparser-2.23.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/pycparser-2.23.dist-info/top_level.txt new file mode 100644 index 00000000..dc1c9e10 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pycparser-2.23.dist-info/top_level.txt @@ -0,0 +1 @@ +pycparser diff --git a/venv/lib/python3.12/site-packages/pycparser/__init__.py b/venv/lib/python3.12/site-packages/pycparser/__init__.py new file mode 100644 index 00000000..170b1818 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pycparser/__init__.py @@ -0,0 +1,93 @@ +#----------------------------------------------------------------- +# pycparser: __init__.py +# +# This package file exports some convenience functions for +# interacting with pycparser +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#----------------------------------------------------------------- +__all__ = ['c_lexer', 'c_parser', 'c_ast'] +__version__ = '2.23' + +import io +from subprocess import check_output +from .c_parser import CParser + + +def preprocess_file(filename, cpp_path='cpp', cpp_args=''): + """ Preprocess a file using cpp. + + filename: + Name of the file you want to preprocess. + + cpp_path: + cpp_args: + Refer to the documentation of parse_file for the meaning of these + arguments. + + When successful, returns the preprocessed file's contents. + Errors from cpp will be printed out. + """ + path_list = [cpp_path] + if isinstance(cpp_args, list): + path_list += cpp_args + elif cpp_args != '': + path_list += [cpp_args] + path_list += [filename] + + try: + # Note the use of universal_newlines to treat all newlines + # as \n for Python's purpose + text = check_output(path_list, universal_newlines=True) + except OSError as e: + raise RuntimeError("Unable to invoke 'cpp'. " + + 'Make sure its path was passed correctly\n' + + ('Original error: %s' % e)) + + return text + + +def parse_file(filename, use_cpp=False, cpp_path='cpp', cpp_args='', + parser=None, encoding=None): + """ Parse a C file using pycparser. + + filename: + Name of the file you want to parse. + + use_cpp: + Set to True if you want to execute the C pre-processor + on the file prior to parsing it. + + cpp_path: + If use_cpp is True, this is the path to 'cpp' on your + system. If no path is provided, it attempts to just + execute 'cpp', so it must be in your PATH. + + cpp_args: + If use_cpp is True, set this to the command line arguments strings + to cpp. Be careful with quotes - it's best to pass a raw string + (r'') here. For example: + r'-I../utils/fake_libc_include' + If several arguments are required, pass a list of strings. + + encoding: + Encoding to use for the file to parse + + parser: + Optional parser object to be used instead of the default CParser + + When successful, an AST is returned. ParseError can be + thrown if the file doesn't parse successfully. + + Errors from cpp will be printed out. 
+ """ + if use_cpp: + text = preprocess_file(filename, cpp_path, cpp_args) + else: + with io.open(filename, encoding=encoding) as f: + text = f.read() + + if parser is None: + parser = CParser() + return parser.parse(text, filename) diff --git a/venv/lib/python3.12/site-packages/pycparser/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pycparser/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..453d0e07 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pycparser/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pycparser/__pycache__/_ast_gen.cpython-312.pyc b/venv/lib/python3.12/site-packages/pycparser/__pycache__/_ast_gen.cpython-312.pyc new file mode 100644 index 00000000..69de0f15 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pycparser/__pycache__/_ast_gen.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pycparser/__pycache__/_build_tables.cpython-312.pyc b/venv/lib/python3.12/site-packages/pycparser/__pycache__/_build_tables.cpython-312.pyc new file mode 100644 index 00000000..f63add38 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pycparser/__pycache__/_build_tables.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pycparser/__pycache__/ast_transforms.cpython-312.pyc b/venv/lib/python3.12/site-packages/pycparser/__pycache__/ast_transforms.cpython-312.pyc new file mode 100644 index 00000000..59502f28 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pycparser/__pycache__/ast_transforms.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pycparser/__pycache__/c_ast.cpython-312.pyc b/venv/lib/python3.12/site-packages/pycparser/__pycache__/c_ast.cpython-312.pyc new file mode 100644 index 00000000..c8cdbd08 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pycparser/__pycache__/c_ast.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pycparser/__pycache__/c_generator.cpython-312.pyc b/venv/lib/python3.12/site-packages/pycparser/__pycache__/c_generator.cpython-312.pyc new file mode 100644 index 00000000..3c034ece Binary files /dev/null and b/venv/lib/python3.12/site-packages/pycparser/__pycache__/c_generator.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pycparser/__pycache__/c_lexer.cpython-312.pyc b/venv/lib/python3.12/site-packages/pycparser/__pycache__/c_lexer.cpython-312.pyc new file mode 100644 index 00000000..c9aacced Binary files /dev/null and b/venv/lib/python3.12/site-packages/pycparser/__pycache__/c_lexer.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pycparser/__pycache__/c_parser.cpython-312.pyc b/venv/lib/python3.12/site-packages/pycparser/__pycache__/c_parser.cpython-312.pyc new file mode 100644 index 00000000..f9ee85bb Binary files /dev/null and b/venv/lib/python3.12/site-packages/pycparser/__pycache__/c_parser.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pycparser/__pycache__/lextab.cpython-312.pyc b/venv/lib/python3.12/site-packages/pycparser/__pycache__/lextab.cpython-312.pyc new file mode 100644 index 00000000..d4c360da Binary files /dev/null and b/venv/lib/python3.12/site-packages/pycparser/__pycache__/lextab.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pycparser/__pycache__/plyparser.cpython-312.pyc b/venv/lib/python3.12/site-packages/pycparser/__pycache__/plyparser.cpython-312.pyc new file mode 100644 index 00000000..ce660bed Binary files 
/dev/null and b/venv/lib/python3.12/site-packages/pycparser/__pycache__/plyparser.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pycparser/__pycache__/yacctab.cpython-312.pyc b/venv/lib/python3.12/site-packages/pycparser/__pycache__/yacctab.cpython-312.pyc new file mode 100644 index 00000000..7e980f59 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pycparser/__pycache__/yacctab.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pycparser/_ast_gen.py b/venv/lib/python3.12/site-packages/pycparser/_ast_gen.py new file mode 100644 index 00000000..0f7d330b --- /dev/null +++ b/venv/lib/python3.12/site-packages/pycparser/_ast_gen.py @@ -0,0 +1,336 @@ +#----------------------------------------------------------------- +# _ast_gen.py +# +# Generates the AST Node classes from a specification given in +# a configuration file +# +# The design of this module was inspired by astgen.py from the +# Python 2.5 code-base. +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#----------------------------------------------------------------- +from string import Template + + +class ASTCodeGenerator(object): + def __init__(self, cfg_filename='_c_ast.cfg'): + """ Initialize the code generator from a configuration + file. + """ + self.cfg_filename = cfg_filename + self.node_cfg = [NodeCfg(name, contents) + for (name, contents) in self.parse_cfgfile(cfg_filename)] + + def generate(self, file=None): + """ Generates the code into file, an open file buffer. + """ + src = Template(_PROLOGUE_COMMENT).substitute( + cfg_filename=self.cfg_filename) + + src += _PROLOGUE_CODE + for node_cfg in self.node_cfg: + src += node_cfg.generate_source() + '\n\n' + + file.write(src) + + def parse_cfgfile(self, filename): + """ Parse the configuration file and yield pairs of + (name, contents) for each node. + """ + with open(filename, "r") as f: + for line in f: + line = line.strip() + if not line or line.startswith('#'): + continue + colon_i = line.find(':') + lbracket_i = line.find('[') + rbracket_i = line.find(']') + if colon_i < 1 or lbracket_i <= colon_i or rbracket_i <= lbracket_i: + raise RuntimeError("Invalid line in %s:\n%s\n" % (filename, line)) + + name = line[:colon_i] + val = line[lbracket_i + 1:rbracket_i] + vallist = [v.strip() for v in val.split(',')] if val else [] + yield name, vallist + + +class NodeCfg(object): + """ Node configuration. + + name: node name + contents: a list of contents - attributes and child nodes + See comment at the top of the configuration file for details. 
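+
+        For example, the entry "Assignment: [op, lvalue*, rvalue*]" describes
+        a node with one attribute ('op', no star) and two child nodes
+        ('lvalue' and 'rvalue', one star each); a double star, as in
+        "Case: [expr*, stmts**]", marks a sequence of child nodes.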
+ """ + + def __init__(self, name, contents): + self.name = name + self.all_entries = [] + self.attr = [] + self.child = [] + self.seq_child = [] + + for entry in contents: + clean_entry = entry.rstrip('*') + self.all_entries.append(clean_entry) + + if entry.endswith('**'): + self.seq_child.append(clean_entry) + elif entry.endswith('*'): + self.child.append(clean_entry) + else: + self.attr.append(entry) + + def generate_source(self): + src = self._gen_init() + src += '\n' + self._gen_children() + src += '\n' + self._gen_iter() + src += '\n' + self._gen_attr_names() + return src + + def _gen_init(self): + src = "class %s(Node):\n" % self.name + + if self.all_entries: + args = ', '.join(self.all_entries) + slots = ', '.join("'{0}'".format(e) for e in self.all_entries) + slots += ", 'coord', '__weakref__'" + arglist = '(self, %s, coord=None)' % args + else: + slots = "'coord', '__weakref__'" + arglist = '(self, coord=None)' + + src += " __slots__ = (%s)\n" % slots + src += " def __init__%s:\n" % arglist + + for name in self.all_entries + ['coord']: + src += " self.%s = %s\n" % (name, name) + + return src + + def _gen_children(self): + src = ' def children(self):\n' + + if self.all_entries: + src += ' nodelist = []\n' + + for child in self.child: + src += ( + ' if self.%(child)s is not None:' + + ' nodelist.append(("%(child)s", self.%(child)s))\n') % ( + dict(child=child)) + + for seq_child in self.seq_child: + src += ( + ' for i, child in enumerate(self.%(child)s or []):\n' + ' nodelist.append(("%(child)s[%%d]" %% i, child))\n') % ( + dict(child=seq_child)) + + src += ' return tuple(nodelist)\n' + else: + src += ' return ()\n' + + return src + + def _gen_iter(self): + src = ' def __iter__(self):\n' + + if self.all_entries: + for child in self.child: + src += ( + ' if self.%(child)s is not None:\n' + + ' yield self.%(child)s\n') % (dict(child=child)) + + for seq_child in self.seq_child: + src += ( + ' for child in (self.%(child)s or []):\n' + ' yield child\n') % (dict(child=seq_child)) + + if not (self.child or self.seq_child): + # Empty generator + src += ( + ' return\n' + + ' yield\n') + else: + # Empty generator + src += ( + ' return\n' + + ' yield\n') + + return src + + def _gen_attr_names(self): + src = " attr_names = (" + ''.join("%r, " % nm for nm in self.attr) + ')' + return src + + +_PROLOGUE_COMMENT = \ +r'''#----------------------------------------------------------------- +# ** ATTENTION ** +# This code was automatically generated from the file: +# $cfg_filename +# +# Do not modify it directly. Modify the configuration file and +# run the generator again. +# ** ** *** ** ** +# +# pycparser: c_ast.py +# +# AST Node classes. +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#----------------------------------------------------------------- + +''' + +_PROLOGUE_CODE = r''' +import sys + +def _repr(obj): + """ + Get the representation of an object, with dedicated pprint-like format for lists. + """ + if isinstance(obj, list): + return '[' + (',\n '.join((_repr(e).replace('\n', '\n ') for e in obj))) + '\n]' + else: + return repr(obj) + +class Node(object): + __slots__ = () + """ Abstract base class for AST nodes. 
+ """ + def __repr__(self): + """ Generates a python representation of the current node + """ + result = self.__class__.__name__ + '(' + + indent = '' + separator = '' + for name in self.__slots__[:-2]: + result += separator + result += indent + result += name + '=' + (_repr(getattr(self, name)).replace('\n', '\n ' + (' ' * (len(name) + len(self.__class__.__name__))))) + + separator = ',' + indent = '\n ' + (' ' * len(self.__class__.__name__)) + + result += indent + ')' + + return result + + def children(self): + """ A sequence of all children that are Nodes + """ + pass + + def show(self, buf=sys.stdout, offset=0, attrnames=False, nodenames=False, showcoord=False, _my_node_name=None): + """ Pretty print the Node and all its attributes and + children (recursively) to a buffer. + + buf: + Open IO buffer into which the Node is printed. + + offset: + Initial offset (amount of leading spaces) + + attrnames: + True if you want to see the attribute names in + name=value pairs. False to only see the values. + + nodenames: + True if you want to see the actual node names + within their parents. + + showcoord: + Do you want the coordinates of each Node to be + displayed. + """ + lead = ' ' * offset + if nodenames and _my_node_name is not None: + buf.write(lead + self.__class__.__name__+ ' <' + _my_node_name + '>: ') + else: + buf.write(lead + self.__class__.__name__+ ': ') + + if self.attr_names: + if attrnames: + nvlist = [(n, getattr(self,n)) for n in self.attr_names] + attrstr = ', '.join('%s=%s' % nv for nv in nvlist) + else: + vlist = [getattr(self, n) for n in self.attr_names] + attrstr = ', '.join('%s' % v for v in vlist) + buf.write(attrstr) + + if showcoord: + buf.write(' (at %s)' % self.coord) + buf.write('\n') + + for (child_name, child) in self.children(): + child.show( + buf, + offset=offset + 2, + attrnames=attrnames, + nodenames=nodenames, + showcoord=showcoord, + _my_node_name=child_name) + + +class NodeVisitor(object): + """ A base NodeVisitor class for visiting c_ast nodes. + Subclass it and define your own visit_XXX methods, where + XXX is the class name you want to visit with these + methods. + + For example: + + class ConstantVisitor(NodeVisitor): + def __init__(self): + self.values = [] + + def visit_Constant(self, node): + self.values.append(node.value) + + Creates a list of values of all the constant nodes + encountered below the given node. To use it: + + cv = ConstantVisitor() + cv.visit(node) + + Notes: + + * generic_visit() will be called for AST nodes for which + no visit_XXX method was defined. + * The children of nodes for which a visit_XXX was + defined will not be visited - if you need this, call + generic_visit() on the node. + You can use: + NodeVisitor.generic_visit(self, node) + * Modeled after Python's own AST visiting facilities + (the ast module of Python 3.0) + """ + + _method_cache = None + + def visit(self, node): + """ Visit a node. + """ + + if self._method_cache is None: + self._method_cache = {} + + visitor = self._method_cache.get(node.__class__.__name__, None) + if visitor is None: + method = 'visit_' + node.__class__.__name__ + visitor = getattr(self, method, self.generic_visit) + self._method_cache[node.__class__.__name__] = visitor + + return visitor(node) + + def generic_visit(self, node): + """ Called if no explicit visitor function exists for a + node. Implements preorder visiting of the node. 
+        """
+        for c in node:
+            self.visit(c)
+
+'''
diff --git a/venv/lib/python3.12/site-packages/pycparser/_build_tables.py b/venv/lib/python3.12/site-packages/pycparser/_build_tables.py
new file mode 100644
index 00000000..4f371079
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pycparser/_build_tables.py
@@ -0,0 +1,40 @@
+#-----------------------------------------------------------------
+# pycparser: _build_tables.py
+#
+# A dummy for generating the lexing/parsing tables and
+# compiling them into .pyc for faster execution in optimized mode.
+# Also generates AST code from the configuration file.
+# Should be called from the pycparser directory.
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#-----------------------------------------------------------------

+# Insert '.' and '..' as first entries to the search path for modules.
+# Restricted environments like embeddable python do not include the
+# current working directory on startup.
+import importlib
+import sys
+sys.path[0:0] = ['.', '..']

+# Generate c_ast.py
+from _ast_gen import ASTCodeGenerator
+ast_gen = ASTCodeGenerator('_c_ast.cfg')
+ast_gen.generate(open('c_ast.py', 'w'))

+from pycparser import c_parser

+# Generates the tables
+#
+c_parser.CParser(
+    lex_optimize=True,
+    yacc_debug=False,
+    yacc_optimize=True)

+# Load to compile into .pyc
+#
+importlib.invalidate_caches()

+import lextab
+import yacctab
+import c_ast
diff --git a/venv/lib/python3.12/site-packages/pycparser/_c_ast.cfg b/venv/lib/python3.12/site-packages/pycparser/_c_ast.cfg
new file mode 100644
index 00000000..0626533e
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pycparser/_c_ast.cfg
@@ -0,0 +1,195 @@
+#-----------------------------------------------------------------
+# pycparser: _c_ast.cfg
+#
+# Defines the AST Node classes used in pycparser.
+#
+# Each entry is a Node sub-class name, listing the attributes
+# and child nodes of the class:
+#   <name>*     - a child node
+#   <name>**    - a sequence of child nodes
+#   <name>      - an attribute
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#-----------------------------------------------------------------

+# ArrayDecl is a nested declaration of an array with the given type.
+# dim: the dimension (for example, constant 42)
+# dim_quals: list of dimension qualifiers, to support C99's allowing 'const'
+#            and 'static' within the array dimension in function declarations.
+ArrayDecl: [type*, dim*, dim_quals]

+ArrayRef: [name*, subscript*]

+# op: =, +=, /= etc.
+#
+Assignment: [op, lvalue*, rvalue*]

+Alignas: [alignment*]

+BinaryOp: [op, left*, right*]

+Break: []

+Case: [expr*, stmts**]

+Cast: [to_type*, expr*]

+# Compound statement in C99 is a list of block items (declarations or
+# statements).
+#
+Compound: [block_items**]

+# Compound literal (anonymous aggregate) for C99.
+# (type-name) {initializer_list}
+# type: the typename
+# init: InitList for the initializer list
+#
+CompoundLiteral: [type*, init*]

+# type: int, char, float, string, etc.
+#
+Constant: [type, value]

+Continue: []

+# name: the variable being declared
+# quals: list of qualifiers (const, volatile)
+# funcspec: list of function specifiers (i.e. inline in C99)
+# storage: list of storage specifiers (extern, register, etc.)
+# type: declaration type (probably nested with all the modifiers) +# init: initialization value, or None +# bitsize: bit field size, or None +# +Decl: [name, quals, align, storage, funcspec, type*, init*, bitsize*] + +DeclList: [decls**] + +Default: [stmts**] + +DoWhile: [cond*, stmt*] + +# Represents the ellipsis (...) parameter in a function +# declaration +# +EllipsisParam: [] + +# An empty statement (a semicolon ';' on its own) +# +EmptyStatement: [] + +# Enumeration type specifier +# name: an optional ID +# values: an EnumeratorList +# +Enum: [name, values*] + +# A name/value pair for enumeration values +# +Enumerator: [name, value*] + +# A list of enumerators +# +EnumeratorList: [enumerators**] + +# A list of expressions separated by the comma operator. +# +ExprList: [exprs**] + +# This is the top of the AST, representing a single C file (a +# translation unit in K&R jargon). It contains a list of +# "external-declaration"s, which is either declarations (Decl), +# Typedef or function definitions (FuncDef). +# +FileAST: [ext**] + +# for (init; cond; next) stmt +# +For: [init*, cond*, next*, stmt*] + +# name: Id +# args: ExprList +# +FuncCall: [name*, args*] + +# type (args) +# +FuncDecl: [args*, type*] + +# Function definition: a declarator for the function name and +# a body, which is a compound statement. +# There's an optional list of parameter declarations for old +# K&R-style definitions +# +FuncDef: [decl*, param_decls**, body*] + +Goto: [name] + +ID: [name] + +# Holder for types that are a simple identifier (e.g. the built +# ins void, char etc. and typedef-defined types) +# +IdentifierType: [names] + +If: [cond*, iftrue*, iffalse*] + +# An initialization list used for compound literals. +# +InitList: [exprs**] + +Label: [name, stmt*] + +# A named initializer for C99. +# The name of a NamedInitializer is a sequence of Nodes, because +# names can be hierarchical and contain constant expressions. +# +NamedInitializer: [name**, expr*] + +# a list of comma separated function parameter declarations +# +ParamList: [params**] + +PtrDecl: [quals, type*] + +Return: [expr*] + +StaticAssert: [cond*, message*] + +# name: struct tag name +# decls: declaration of members +# +Struct: [name, decls**] + +# type: . or -> +# name.field or name->field +# +StructRef: [name*, type, field*] + +Switch: [cond*, stmt*] + +# cond ? iftrue : iffalse +# +TernaryOp: [cond*, iftrue*, iffalse*] + +# A base type declaration +# +TypeDecl: [declname, quals, align, type*] + +# A typedef declaration. +# Very similar to Decl, but without some attributes +# +Typedef: [name, quals, storage, type*] + +Typename: [name, quals, align, type*] + +UnaryOp: [op, expr*] + +# name: union tag name +# decls: declaration of members +# +Union: [name, decls**] + +While: [cond*, stmt*] + +Pragma: [string] diff --git a/venv/lib/python3.12/site-packages/pycparser/ast_transforms.py b/venv/lib/python3.12/site-packages/pycparser/ast_transforms.py new file mode 100644 index 00000000..367dcf54 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pycparser/ast_transforms.py @@ -0,0 +1,164 @@ +#------------------------------------------------------------------------------ +# pycparser: ast_transforms.py +# +# Some utilities used by the parser to create a friendlier AST. +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#------------------------------------------------------------------------------ + +from . 
import c_ast
+
+
+def fix_switch_cases(switch_node):
+    """ The 'case' statements in a 'switch' come out of parsing with one
+        child node, so subsequent statements are just tucked to the parent
+        Compound. Additionally, consecutive (fall-through) case statements
+        come out messy. This is a peculiarity of the C grammar. The following:
+
+            switch (myvar) {
+                case 10:
+                    k = 10;
+                    p = k + 1;
+                    return 10;
+                case 20:
+                case 30:
+                    return 20;
+                default:
+                    break;
+            }
+
+        Creates this tree (pseudo-dump):
+
+            Switch
+                ID: myvar
+                Compound:
+                    Case 10:
+                        k = 10
+                    p = k + 1
+                    return 10
+                    Case 20:
+                        Case 30:
+                            return 20
+                    Default:
+                        break
+
+        The goal of this transform is to fix this mess, turning it into the
+        following (the fall-through statements now hang under their case, and
+        Case 30 becomes a sibling of Case 20 rather than its child):
+
+            Switch
+                ID: myvar
+                Compound:
+                    Case 10:
+                        k = 10
+                        p = k + 1
+                        return 10
+                    Case 20:
+                    Case 30:
+                        return 20
+                    Default:
+                        break
+
+        A fixed AST node is returned. The argument may be modified.
+    """
+    assert isinstance(switch_node, c_ast.Switch)
+    if not isinstance(switch_node.stmt, c_ast.Compound):
+        return switch_node
+
+    # The new Compound child for the Switch, which will collect children in the
+    # correct order
+    new_compound = c_ast.Compound([], switch_node.stmt.coord)
+
+    # The last Case/Default node
+    last_case = None
+
+    # Goes over the children of the Compound below the Switch, adding them
+    # either directly below new_compound or below the last Case as appropriate
+    # (for `switch(cond) {}`, block_items would have been None)
+    for child in (switch_node.stmt.block_items or []):
+        if isinstance(child, (c_ast.Case, c_ast.Default)):
+            # If it's a Case/Default:
+            # 1. Add it to the Compound and mark as "last case"
+            # 2. If its immediate child is also a Case or Default, promote it
+            #    to a sibling.
+            new_compound.block_items.append(child)
+            _extract_nested_case(child, new_compound.block_items)
+            last_case = new_compound.block_items[-1]
+        else:
+            # Other statements are added as children to the last case, if it
+            # exists.
+            if last_case is None:
+                new_compound.block_items.append(child)
+            else:
+                last_case.stmts.append(child)
+
+    switch_node.stmt = new_compound
+    return switch_node
+
+
+def _extract_nested_case(case_node, stmts_list):
+    """ Recursively extract consecutive Case statements that are made nested
+        by the parser and add them to the stmts_list.
+    """
+    if isinstance(case_node.stmts[0], (c_ast.Case, c_ast.Default)):
+        stmts_list.append(case_node.stmts.pop())
+        _extract_nested_case(stmts_list[-1], stmts_list)
+
+
+def fix_atomic_specifiers(decl):
+    """ Atomic specifiers like _Atomic(type) are unusually structured,
+        conferring a qualifier upon the contained type.
+
+        This function fixes a decl with atomic specifiers to have a sane AST
+        structure, by removing spurious Typename->TypeDecl pairs and attaching
+        the _Atomic qualifier in the right place.
+    """
+    # There can be multiple levels of _Atomic in a decl; fix them until a
+    # fixed point is reached.
+    while True:
+        decl, found = _fix_atomic_specifiers_once(decl)
+        if not found:
+            break
+
+    # Make sure to add an _Atomic qual on the topmost decl if needed. Also
+    # restore the declname on the innermost TypeDecl (it gets placed in the
+    # wrong place during construction).
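+    # For a decl such as "_Atomic(int) x;" (illustrative), the loop below
+    # walks down to the innermost TypeDecl, hoists the '_Atomic' qualifier
+    # onto the top-level decl's quals, and restores the decl name on the
+    # TypeDecl if it was lost during parsing.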
+ typ = decl + while not isinstance(typ, c_ast.TypeDecl): + try: + typ = typ.type + except AttributeError: + return decl + if '_Atomic' in typ.quals and '_Atomic' not in decl.quals: + decl.quals.append('_Atomic') + if typ.declname is None: + typ.declname = decl.name + + return decl + + +def _fix_atomic_specifiers_once(decl): + """ Performs one 'fix' round of atomic specifiers. + Returns (modified_decl, found) where found is True iff a fix was made. + """ + parent = decl + grandparent = None + node = decl.type + while node is not None: + if isinstance(node, c_ast.Typename) and '_Atomic' in node.quals: + break + try: + grandparent = parent + parent = node + node = node.type + except AttributeError: + # If we've reached a node without a `type` field, it means we won't + # find what we're looking for at this point; give up the search + # and return the original decl unmodified. + return decl, False + + assert isinstance(parent, c_ast.TypeDecl) + grandparent.type = node.type + if '_Atomic' not in node.type.quals: + node.type.quals.append('_Atomic') + return decl, True diff --git a/venv/lib/python3.12/site-packages/pycparser/c_ast.py b/venv/lib/python3.12/site-packages/pycparser/c_ast.py new file mode 100644 index 00000000..6575a2ad --- /dev/null +++ b/venv/lib/python3.12/site-packages/pycparser/c_ast.py @@ -0,0 +1,1125 @@ +#----------------------------------------------------------------- +# ** ATTENTION ** +# This code was automatically generated from the file: +# _c_ast.cfg +# +# Do not modify it directly. Modify the configuration file and +# run the generator again. +# ** ** *** ** ** +# +# pycparser: c_ast.py +# +# AST Node classes. +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#----------------------------------------------------------------- + + +import sys + +def _repr(obj): + """ + Get the representation of an object, with dedicated pprint-like format for lists. + """ + if isinstance(obj, list): + return '[' + (',\n '.join((_repr(e).replace('\n', '\n ') for e in obj))) + '\n]' + else: + return repr(obj) + +class Node(object): + __slots__ = () + """ Abstract base class for AST nodes. + """ + def __repr__(self): + """ Generates a python representation of the current node + """ + result = self.__class__.__name__ + '(' + + indent = '' + separator = '' + for name in self.__slots__[:-2]: + result += separator + result += indent + result += name + '=' + (_repr(getattr(self, name)).replace('\n', '\n ' + (' ' * (len(name) + len(self.__class__.__name__))))) + + separator = ',' + indent = '\n ' + (' ' * len(self.__class__.__name__)) + + result += indent + ')' + + return result + + def children(self): + """ A sequence of all children that are Nodes + """ + pass + + def show(self, buf=sys.stdout, offset=0, attrnames=False, nodenames=False, showcoord=False, _my_node_name=None): + """ Pretty print the Node and all its attributes and + children (recursively) to a buffer. + + buf: + Open IO buffer into which the Node is printed. + + offset: + Initial offset (amount of leading spaces) + + attrnames: + True if you want to see the attribute names in + name=value pairs. False to only see the values. + + nodenames: + True if you want to see the actual node names + within their parents. + + showcoord: + Do you want the coordinates of each Node to be + displayed. 
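+
+            Example (illustrative): node.show(attrnames=True, showcoord=True)
+            prints each node on its own line with 'name=value' attribute
+            pairs, indenting children by two spaces per level.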
+ """ + lead = ' ' * offset + if nodenames and _my_node_name is not None: + buf.write(lead + self.__class__.__name__+ ' <' + _my_node_name + '>: ') + else: + buf.write(lead + self.__class__.__name__+ ': ') + + if self.attr_names: + if attrnames: + nvlist = [(n, getattr(self,n)) for n in self.attr_names] + attrstr = ', '.join('%s=%s' % nv for nv in nvlist) + else: + vlist = [getattr(self, n) for n in self.attr_names] + attrstr = ', '.join('%s' % v for v in vlist) + buf.write(attrstr) + + if showcoord: + buf.write(' (at %s)' % self.coord) + buf.write('\n') + + for (child_name, child) in self.children(): + child.show( + buf, + offset=offset + 2, + attrnames=attrnames, + nodenames=nodenames, + showcoord=showcoord, + _my_node_name=child_name) + + +class NodeVisitor(object): + """ A base NodeVisitor class for visiting c_ast nodes. + Subclass it and define your own visit_XXX methods, where + XXX is the class name you want to visit with these + methods. + + For example: + + class ConstantVisitor(NodeVisitor): + def __init__(self): + self.values = [] + + def visit_Constant(self, node): + self.values.append(node.value) + + Creates a list of values of all the constant nodes + encountered below the given node. To use it: + + cv = ConstantVisitor() + cv.visit(node) + + Notes: + + * generic_visit() will be called for AST nodes for which + no visit_XXX method was defined. + * The children of nodes for which a visit_XXX was + defined will not be visited - if you need this, call + generic_visit() on the node. + You can use: + NodeVisitor.generic_visit(self, node) + * Modeled after Python's own AST visiting facilities + (the ast module of Python 3.0) + """ + + _method_cache = None + + def visit(self, node): + """ Visit a node. + """ + + if self._method_cache is None: + self._method_cache = {} + + visitor = self._method_cache.get(node.__class__.__name__, None) + if visitor is None: + method = 'visit_' + node.__class__.__name__ + visitor = getattr(self, method, self.generic_visit) + self._method_cache[node.__class__.__name__] = visitor + + return visitor(node) + + def generic_visit(self, node): + """ Called if no explicit visitor function exists for a + node. Implements preorder visiting of the node. 
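+
+            For example, a visitor defining only visit_Constant still
+            traverses an entire FuncDef: every node without a dedicated
+            visit_XXX method falls back here, which recurses into its
+            children.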
+ """ + for c in node: + self.visit(c) + +class ArrayDecl(Node): + __slots__ = ('type', 'dim', 'dim_quals', 'coord', '__weakref__') + def __init__(self, type, dim, dim_quals, coord=None): + self.type = type + self.dim = dim + self.dim_quals = dim_quals + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + if self.dim is not None: nodelist.append(("dim", self.dim)) + return tuple(nodelist) + + def __iter__(self): + if self.type is not None: + yield self.type + if self.dim is not None: + yield self.dim + + attr_names = ('dim_quals', ) + +class ArrayRef(Node): + __slots__ = ('name', 'subscript', 'coord', '__weakref__') + def __init__(self, name, subscript, coord=None): + self.name = name + self.subscript = subscript + self.coord = coord + + def children(self): + nodelist = [] + if self.name is not None: nodelist.append(("name", self.name)) + if self.subscript is not None: nodelist.append(("subscript", self.subscript)) + return tuple(nodelist) + + def __iter__(self): + if self.name is not None: + yield self.name + if self.subscript is not None: + yield self.subscript + + attr_names = () + +class Assignment(Node): + __slots__ = ('op', 'lvalue', 'rvalue', 'coord', '__weakref__') + def __init__(self, op, lvalue, rvalue, coord=None): + self.op = op + self.lvalue = lvalue + self.rvalue = rvalue + self.coord = coord + + def children(self): + nodelist = [] + if self.lvalue is not None: nodelist.append(("lvalue", self.lvalue)) + if self.rvalue is not None: nodelist.append(("rvalue", self.rvalue)) + return tuple(nodelist) + + def __iter__(self): + if self.lvalue is not None: + yield self.lvalue + if self.rvalue is not None: + yield self.rvalue + + attr_names = ('op', ) + +class Alignas(Node): + __slots__ = ('alignment', 'coord', '__weakref__') + def __init__(self, alignment, coord=None): + self.alignment = alignment + self.coord = coord + + def children(self): + nodelist = [] + if self.alignment is not None: nodelist.append(("alignment", self.alignment)) + return tuple(nodelist) + + def __iter__(self): + if self.alignment is not None: + yield self.alignment + + attr_names = () + +class BinaryOp(Node): + __slots__ = ('op', 'left', 'right', 'coord', '__weakref__') + def __init__(self, op, left, right, coord=None): + self.op = op + self.left = left + self.right = right + self.coord = coord + + def children(self): + nodelist = [] + if self.left is not None: nodelist.append(("left", self.left)) + if self.right is not None: nodelist.append(("right", self.right)) + return tuple(nodelist) + + def __iter__(self): + if self.left is not None: + yield self.left + if self.right is not None: + yield self.right + + attr_names = ('op', ) + +class Break(Node): + __slots__ = ('coord', '__weakref__') + def __init__(self, coord=None): + self.coord = coord + + def children(self): + return () + + def __iter__(self): + return + yield + + attr_names = () + +class Case(Node): + __slots__ = ('expr', 'stmts', 'coord', '__weakref__') + def __init__(self, expr, stmts, coord=None): + self.expr = expr + self.stmts = stmts + self.coord = coord + + def children(self): + nodelist = [] + if self.expr is not None: nodelist.append(("expr", self.expr)) + for i, child in enumerate(self.stmts or []): + nodelist.append(("stmts[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + if self.expr is not None: + yield self.expr + for child in (self.stmts or []): + yield child + + attr_names = () + +class Cast(Node): + __slots__ = ('to_type', 'expr', 'coord', 
'__weakref__') + def __init__(self, to_type, expr, coord=None): + self.to_type = to_type + self.expr = expr + self.coord = coord + + def children(self): + nodelist = [] + if self.to_type is not None: nodelist.append(("to_type", self.to_type)) + if self.expr is not None: nodelist.append(("expr", self.expr)) + return tuple(nodelist) + + def __iter__(self): + if self.to_type is not None: + yield self.to_type + if self.expr is not None: + yield self.expr + + attr_names = () + +class Compound(Node): + __slots__ = ('block_items', 'coord', '__weakref__') + def __init__(self, block_items, coord=None): + self.block_items = block_items + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.block_items or []): + nodelist.append(("block_items[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.block_items or []): + yield child + + attr_names = () + +class CompoundLiteral(Node): + __slots__ = ('type', 'init', 'coord', '__weakref__') + def __init__(self, type, init, coord=None): + self.type = type + self.init = init + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + if self.init is not None: nodelist.append(("init", self.init)) + return tuple(nodelist) + + def __iter__(self): + if self.type is not None: + yield self.type + if self.init is not None: + yield self.init + + attr_names = () + +class Constant(Node): + __slots__ = ('type', 'value', 'coord', '__weakref__') + def __init__(self, type, value, coord=None): + self.type = type + self.value = value + self.coord = coord + + def children(self): + nodelist = [] + return tuple(nodelist) + + def __iter__(self): + return + yield + + attr_names = ('type', 'value', ) + +class Continue(Node): + __slots__ = ('coord', '__weakref__') + def __init__(self, coord=None): + self.coord = coord + + def children(self): + return () + + def __iter__(self): + return + yield + + attr_names = () + +class Decl(Node): + __slots__ = ('name', 'quals', 'align', 'storage', 'funcspec', 'type', 'init', 'bitsize', 'coord', '__weakref__') + def __init__(self, name, quals, align, storage, funcspec, type, init, bitsize, coord=None): + self.name = name + self.quals = quals + self.align = align + self.storage = storage + self.funcspec = funcspec + self.type = type + self.init = init + self.bitsize = bitsize + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + if self.init is not None: nodelist.append(("init", self.init)) + if self.bitsize is not None: nodelist.append(("bitsize", self.bitsize)) + return tuple(nodelist) + + def __iter__(self): + if self.type is not None: + yield self.type + if self.init is not None: + yield self.init + if self.bitsize is not None: + yield self.bitsize + + attr_names = ('name', 'quals', 'align', 'storage', 'funcspec', ) + +class DeclList(Node): + __slots__ = ('decls', 'coord', '__weakref__') + def __init__(self, decls, coord=None): + self.decls = decls + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.decls or []): + nodelist.append(("decls[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.decls or []): + yield child + + attr_names = () + +class Default(Node): + __slots__ = ('stmts', 'coord', '__weakref__') + def __init__(self, stmts, coord=None): + self.stmts = stmts + self.coord = coord + + def children(self): + nodelist = [] + for i, child in 
enumerate(self.stmts or []): + nodelist.append(("stmts[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.stmts or []): + yield child + + attr_names = () + +class DoWhile(Node): + __slots__ = ('cond', 'stmt', 'coord', '__weakref__') + def __init__(self, cond, stmt, coord=None): + self.cond = cond + self.stmt = stmt + self.coord = coord + + def children(self): + nodelist = [] + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.stmt is not None: nodelist.append(("stmt", self.stmt)) + return tuple(nodelist) + + def __iter__(self): + if self.cond is not None: + yield self.cond + if self.stmt is not None: + yield self.stmt + + attr_names = () + +class EllipsisParam(Node): + __slots__ = ('coord', '__weakref__') + def __init__(self, coord=None): + self.coord = coord + + def children(self): + return () + + def __iter__(self): + return + yield + + attr_names = () + +class EmptyStatement(Node): + __slots__ = ('coord', '__weakref__') + def __init__(self, coord=None): + self.coord = coord + + def children(self): + return () + + def __iter__(self): + return + yield + + attr_names = () + +class Enum(Node): + __slots__ = ('name', 'values', 'coord', '__weakref__') + def __init__(self, name, values, coord=None): + self.name = name + self.values = values + self.coord = coord + + def children(self): + nodelist = [] + if self.values is not None: nodelist.append(("values", self.values)) + return tuple(nodelist) + + def __iter__(self): + if self.values is not None: + yield self.values + + attr_names = ('name', ) + +class Enumerator(Node): + __slots__ = ('name', 'value', 'coord', '__weakref__') + def __init__(self, name, value, coord=None): + self.name = name + self.value = value + self.coord = coord + + def children(self): + nodelist = [] + if self.value is not None: nodelist.append(("value", self.value)) + return tuple(nodelist) + + def __iter__(self): + if self.value is not None: + yield self.value + + attr_names = ('name', ) + +class EnumeratorList(Node): + __slots__ = ('enumerators', 'coord', '__weakref__') + def __init__(self, enumerators, coord=None): + self.enumerators = enumerators + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.enumerators or []): + nodelist.append(("enumerators[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.enumerators or []): + yield child + + attr_names = () + +class ExprList(Node): + __slots__ = ('exprs', 'coord', '__weakref__') + def __init__(self, exprs, coord=None): + self.exprs = exprs + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.exprs or []): + nodelist.append(("exprs[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.exprs or []): + yield child + + attr_names = () + +class FileAST(Node): + __slots__ = ('ext', 'coord', '__weakref__') + def __init__(self, ext, coord=None): + self.ext = ext + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.ext or []): + nodelist.append(("ext[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.ext or []): + yield child + + attr_names = () + +class For(Node): + __slots__ = ('init', 'cond', 'next', 'stmt', 'coord', '__weakref__') + def __init__(self, init, cond, next, stmt, coord=None): + self.init = init + self.cond = cond + self.next = next + self.stmt = stmt + self.coord = coord + + def children(self): + nodelist = [] + if self.init is 
not None: nodelist.append(("init", self.init)) + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.next is not None: nodelist.append(("next", self.next)) + if self.stmt is not None: nodelist.append(("stmt", self.stmt)) + return tuple(nodelist) + + def __iter__(self): + if self.init is not None: + yield self.init + if self.cond is not None: + yield self.cond + if self.next is not None: + yield self.next + if self.stmt is not None: + yield self.stmt + + attr_names = () + +class FuncCall(Node): + __slots__ = ('name', 'args', 'coord', '__weakref__') + def __init__(self, name, args, coord=None): + self.name = name + self.args = args + self.coord = coord + + def children(self): + nodelist = [] + if self.name is not None: nodelist.append(("name", self.name)) + if self.args is not None: nodelist.append(("args", self.args)) + return tuple(nodelist) + + def __iter__(self): + if self.name is not None: + yield self.name + if self.args is not None: + yield self.args + + attr_names = () + +class FuncDecl(Node): + __slots__ = ('args', 'type', 'coord', '__weakref__') + def __init__(self, args, type, coord=None): + self.args = args + self.type = type + self.coord = coord + + def children(self): + nodelist = [] + if self.args is not None: nodelist.append(("args", self.args)) + if self.type is not None: nodelist.append(("type", self.type)) + return tuple(nodelist) + + def __iter__(self): + if self.args is not None: + yield self.args + if self.type is not None: + yield self.type + + attr_names = () + +class FuncDef(Node): + __slots__ = ('decl', 'param_decls', 'body', 'coord', '__weakref__') + def __init__(self, decl, param_decls, body, coord=None): + self.decl = decl + self.param_decls = param_decls + self.body = body + self.coord = coord + + def children(self): + nodelist = [] + if self.decl is not None: nodelist.append(("decl", self.decl)) + if self.body is not None: nodelist.append(("body", self.body)) + for i, child in enumerate(self.param_decls or []): + nodelist.append(("param_decls[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + if self.decl is not None: + yield self.decl + if self.body is not None: + yield self.body + for child in (self.param_decls or []): + yield child + + attr_names = () + +class Goto(Node): + __slots__ = ('name', 'coord', '__weakref__') + def __init__(self, name, coord=None): + self.name = name + self.coord = coord + + def children(self): + nodelist = [] + return tuple(nodelist) + + def __iter__(self): + return + yield + + attr_names = ('name', ) + +class ID(Node): + __slots__ = ('name', 'coord', '__weakref__') + def __init__(self, name, coord=None): + self.name = name + self.coord = coord + + def children(self): + nodelist = [] + return tuple(nodelist) + + def __iter__(self): + return + yield + + attr_names = ('name', ) + +class IdentifierType(Node): + __slots__ = ('names', 'coord', '__weakref__') + def __init__(self, names, coord=None): + self.names = names + self.coord = coord + + def children(self): + nodelist = [] + return tuple(nodelist) + + def __iter__(self): + return + yield + + attr_names = ('names', ) + +class If(Node): + __slots__ = ('cond', 'iftrue', 'iffalse', 'coord', '__weakref__') + def __init__(self, cond, iftrue, iffalse, coord=None): + self.cond = cond + self.iftrue = iftrue + self.iffalse = iffalse + self.coord = coord + + def children(self): + nodelist = [] + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.iftrue is not None: nodelist.append(("iftrue", self.iftrue)) + if self.iffalse is not 
None: nodelist.append(("iffalse", self.iffalse)) + return tuple(nodelist) + + def __iter__(self): + if self.cond is not None: + yield self.cond + if self.iftrue is not None: + yield self.iftrue + if self.iffalse is not None: + yield self.iffalse + + attr_names = () + +class InitList(Node): + __slots__ = ('exprs', 'coord', '__weakref__') + def __init__(self, exprs, coord=None): + self.exprs = exprs + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.exprs or []): + nodelist.append(("exprs[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.exprs or []): + yield child + + attr_names = () + +class Label(Node): + __slots__ = ('name', 'stmt', 'coord', '__weakref__') + def __init__(self, name, stmt, coord=None): + self.name = name + self.stmt = stmt + self.coord = coord + + def children(self): + nodelist = [] + if self.stmt is not None: nodelist.append(("stmt", self.stmt)) + return tuple(nodelist) + + def __iter__(self): + if self.stmt is not None: + yield self.stmt + + attr_names = ('name', ) + +class NamedInitializer(Node): + __slots__ = ('name', 'expr', 'coord', '__weakref__') + def __init__(self, name, expr, coord=None): + self.name = name + self.expr = expr + self.coord = coord + + def children(self): + nodelist = [] + if self.expr is not None: nodelist.append(("expr", self.expr)) + for i, child in enumerate(self.name or []): + nodelist.append(("name[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + if self.expr is not None: + yield self.expr + for child in (self.name or []): + yield child + + attr_names = () + +class ParamList(Node): + __slots__ = ('params', 'coord', '__weakref__') + def __init__(self, params, coord=None): + self.params = params + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.params or []): + nodelist.append(("params[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.params or []): + yield child + + attr_names = () + +class PtrDecl(Node): + __slots__ = ('quals', 'type', 'coord', '__weakref__') + def __init__(self, quals, type, coord=None): + self.quals = quals + self.type = type + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + return tuple(nodelist) + + def __iter__(self): + if self.type is not None: + yield self.type + + attr_names = ('quals', ) + +class Return(Node): + __slots__ = ('expr', 'coord', '__weakref__') + def __init__(self, expr, coord=None): + self.expr = expr + self.coord = coord + + def children(self): + nodelist = [] + if self.expr is not None: nodelist.append(("expr", self.expr)) + return tuple(nodelist) + + def __iter__(self): + if self.expr is not None: + yield self.expr + + attr_names = () + +class StaticAssert(Node): + __slots__ = ('cond', 'message', 'coord', '__weakref__') + def __init__(self, cond, message, coord=None): + self.cond = cond + self.message = message + self.coord = coord + + def children(self): + nodelist = [] + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.message is not None: nodelist.append(("message", self.message)) + return tuple(nodelist) + + def __iter__(self): + if self.cond is not None: + yield self.cond + if self.message is not None: + yield self.message + + attr_names = () + +class Struct(Node): + __slots__ = ('name', 'decls', 'coord', '__weakref__') + def __init__(self, name, decls, coord=None): + self.name = name + self.decls = decls + 
self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.decls or []): + nodelist.append(("decls[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.decls or []): + yield child + + attr_names = ('name', ) + +class StructRef(Node): + __slots__ = ('name', 'type', 'field', 'coord', '__weakref__') + def __init__(self, name, type, field, coord=None): + self.name = name + self.type = type + self.field = field + self.coord = coord + + def children(self): + nodelist = [] + if self.name is not None: nodelist.append(("name", self.name)) + if self.field is not None: nodelist.append(("field", self.field)) + return tuple(nodelist) + + def __iter__(self): + if self.name is not None: + yield self.name + if self.field is not None: + yield self.field + + attr_names = ('type', ) + +class Switch(Node): + __slots__ = ('cond', 'stmt', 'coord', '__weakref__') + def __init__(self, cond, stmt, coord=None): + self.cond = cond + self.stmt = stmt + self.coord = coord + + def children(self): + nodelist = [] + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.stmt is not None: nodelist.append(("stmt", self.stmt)) + return tuple(nodelist) + + def __iter__(self): + if self.cond is not None: + yield self.cond + if self.stmt is not None: + yield self.stmt + + attr_names = () + +class TernaryOp(Node): + __slots__ = ('cond', 'iftrue', 'iffalse', 'coord', '__weakref__') + def __init__(self, cond, iftrue, iffalse, coord=None): + self.cond = cond + self.iftrue = iftrue + self.iffalse = iffalse + self.coord = coord + + def children(self): + nodelist = [] + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.iftrue is not None: nodelist.append(("iftrue", self.iftrue)) + if self.iffalse is not None: nodelist.append(("iffalse", self.iffalse)) + return tuple(nodelist) + + def __iter__(self): + if self.cond is not None: + yield self.cond + if self.iftrue is not None: + yield self.iftrue + if self.iffalse is not None: + yield self.iffalse + + attr_names = () + +class TypeDecl(Node): + __slots__ = ('declname', 'quals', 'align', 'type', 'coord', '__weakref__') + def __init__(self, declname, quals, align, type, coord=None): + self.declname = declname + self.quals = quals + self.align = align + self.type = type + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + return tuple(nodelist) + + def __iter__(self): + if self.type is not None: + yield self.type + + attr_names = ('declname', 'quals', 'align', ) + +class Typedef(Node): + __slots__ = ('name', 'quals', 'storage', 'type', 'coord', '__weakref__') + def __init__(self, name, quals, storage, type, coord=None): + self.name = name + self.quals = quals + self.storage = storage + self.type = type + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + return tuple(nodelist) + + def __iter__(self): + if self.type is not None: + yield self.type + + attr_names = ('name', 'quals', 'storage', ) + +class Typename(Node): + __slots__ = ('name', 'quals', 'align', 'type', 'coord', '__weakref__') + def __init__(self, name, quals, align, type, coord=None): + self.name = name + self.quals = quals + self.align = align + self.type = type + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + return tuple(nodelist) + + def __iter__(self): + if self.type is not None: + yield 
self.type + + attr_names = ('name', 'quals', 'align', ) + +class UnaryOp(Node): + __slots__ = ('op', 'expr', 'coord', '__weakref__') + def __init__(self, op, expr, coord=None): + self.op = op + self.expr = expr + self.coord = coord + + def children(self): + nodelist = [] + if self.expr is not None: nodelist.append(("expr", self.expr)) + return tuple(nodelist) + + def __iter__(self): + if self.expr is not None: + yield self.expr + + attr_names = ('op', ) + +class Union(Node): + __slots__ = ('name', 'decls', 'coord', '__weakref__') + def __init__(self, name, decls, coord=None): + self.name = name + self.decls = decls + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.decls or []): + nodelist.append(("decls[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.decls or []): + yield child + + attr_names = ('name', ) + +class While(Node): + __slots__ = ('cond', 'stmt', 'coord', '__weakref__') + def __init__(self, cond, stmt, coord=None): + self.cond = cond + self.stmt = stmt + self.coord = coord + + def children(self): + nodelist = [] + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.stmt is not None: nodelist.append(("stmt", self.stmt)) + return tuple(nodelist) + + def __iter__(self): + if self.cond is not None: + yield self.cond + if self.stmt is not None: + yield self.stmt + + attr_names = () + +class Pragma(Node): + __slots__ = ('string', 'coord', '__weakref__') + def __init__(self, string, coord=None): + self.string = string + self.coord = coord + + def children(self): + nodelist = [] + return tuple(nodelist) + + def __iter__(self): + return + yield + + attr_names = ('string', ) + diff --git a/venv/lib/python3.12/site-packages/pycparser/c_generator.py b/venv/lib/python3.12/site-packages/pycparser/c_generator.py new file mode 100644 index 00000000..f8ab0a47 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pycparser/c_generator.py @@ -0,0 +1,502 @@ +#------------------------------------------------------------------------------ +# pycparser: c_generator.py +# +# C code generator from pycparser AST nodes. +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#------------------------------------------------------------------------------ +from . import c_ast + + +class CGenerator(object): + """ Uses the same visitor pattern as c_ast.NodeVisitor, but modified to + return a value from each visit method, using string accumulation in + generic_visit. + """ + def __init__(self, reduce_parentheses=False): + """ Constructs C-code generator + + reduce_parentheses: + if True, eliminates needless parentheses on binary operators + """ + # Statements start with indentation of self.indent_level spaces, using + # the _make_indent method. 
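+        # Typical use (illustrative): CGenerator().visit(ast) returns the C
+        # source for a parsed c_ast tree as a single string.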
+        self.indent_level = 0
+        self.reduce_parentheses = reduce_parentheses
+
+    def _make_indent(self):
+        return ' ' * self.indent_level
+
+    def visit(self, node):
+        method = 'visit_' + node.__class__.__name__
+        return getattr(self, method, self.generic_visit)(node)
+
+    def generic_visit(self, node):
+        if node is None:
+            return ''
+        else:
+            return ''.join(self.visit(c) for c_name, c in node.children())
+
+    def visit_Constant(self, n):
+        return n.value
+
+    def visit_ID(self, n):
+        return n.name
+
+    def visit_Pragma(self, n):
+        ret = '#pragma'
+        if n.string:
+            ret += ' ' + n.string
+        return ret
+
+    def visit_ArrayRef(self, n):
+        arrref = self._parenthesize_unless_simple(n.name)
+        return arrref + '[' + self.visit(n.subscript) + ']'
+
+    def visit_StructRef(self, n):
+        sref = self._parenthesize_unless_simple(n.name)
+        return sref + n.type + self.visit(n.field)
+
+    def visit_FuncCall(self, n):
+        fref = self._parenthesize_unless_simple(n.name)
+        return fref + '(' + self.visit(n.args) + ')'
+
+    def visit_UnaryOp(self, n):
+        if n.op == 'sizeof':
+            # Always parenthesize the argument of sizeof since it can be
+            # a name.
+            return 'sizeof(%s)' % self.visit(n.expr)
+        else:
+            operand = self._parenthesize_unless_simple(n.expr)
+            if n.op == 'p++':
+                return '%s++' % operand
+            elif n.op == 'p--':
+                return '%s--' % operand
+            else:
+                return '%s%s' % (n.op, operand)
+
+    # Precedence map of binary operators:
+    precedence_map = {
+        # Should be in sync with c_parser.CParser.precedence
+        # Higher numbers are stronger binding
+        '||': 0,  # weakest binding
+        '&&': 1,
+        '|': 2,
+        '^': 3,
+        '&': 4,
+        '==': 5, '!=': 5,
+        '>': 6, '>=': 6, '<': 6, '<=': 6,
+        '>>': 7, '<<': 7,
+        '+': 8, '-': 8,
+        '*': 9, '/': 9, '%': 9  # strongest binding
+    }
+
+    def visit_BinaryOp(self, n):
+        # Note: all binary operators are left-to-right associative
+        #
+        # If `n.left.op` has a stronger or equally binding precedence in
+        # comparison to `n.op`, no parentheses are needed for the left:
+        # e.g., `(a*b) + c` is equivalent to `a*b + c`, as well as
+        # `(a+b) - c` is equivalent to `a+b - c` (same precedence).
+        # If the left operator is weaker binding than the current, then
+        # parentheses are necessary:
+        # e.g., `(a+b) * c` is NOT equivalent to `a+b * c`.
+        lval_str = self._parenthesize_if(
+            n.left,
+            lambda d: not (self._is_simple_node(d) or
+                      self.reduce_parentheses and isinstance(d, c_ast.BinaryOp) and
+                      self.precedence_map[d.op] >= self.precedence_map[n.op]))
+        # If `n.right.op` has a stronger (but not equal) binding precedence,
+        # parentheses can be omitted on the right:
+        # e.g., `a + (b*c)` is equivalent to `a + b*c`.
+        # If the right operator is weaker or equally binding, then parentheses
+        # are necessary:
+        # e.g., `a * (b+c)` is NOT equivalent to `a * b+c` and
+        # `a - (b+c)` is NOT equivalent to `a - b+c` (same precedence).
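+        # Worked example (illustrative): with reduce_parentheses=True and
+        # n.op == '-', a left child BinaryOp('+', ...) satisfies 8 >= 8 and
+        # loses its parentheses, while a right child BinaryOp('+', ...) fails
+        # the strict 8 > 8 test and keeps them, giving 'a + b - (c + d)',
+        # which preserves left-to-right associativity.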
+ rval_str = self._parenthesize_if( + n.right, + lambda d: not (self._is_simple_node(d) or + self.reduce_parentheses and isinstance(d, c_ast.BinaryOp) and + self.precedence_map[d.op] > self.precedence_map[n.op])) + return '%s %s %s' % (lval_str, n.op, rval_str) + + def visit_Assignment(self, n): + rval_str = self._parenthesize_if( + n.rvalue, + lambda n: isinstance(n, c_ast.Assignment)) + return '%s %s %s' % (self.visit(n.lvalue), n.op, rval_str) + + def visit_IdentifierType(self, n): + return ' '.join(n.names) + + def _visit_expr(self, n): + if isinstance(n, c_ast.InitList): + return '{' + self.visit(n) + '}' + elif isinstance(n, (c_ast.ExprList, c_ast.Compound)): + return '(' + self.visit(n) + ')' + else: + return self.visit(n) + + def visit_Decl(self, n, no_type=False): + # no_type is used when a Decl is part of a DeclList, where the type is + # explicitly only for the first declaration in a list. + # + s = n.name if no_type else self._generate_decl(n) + if n.bitsize: s += ' : ' + self.visit(n.bitsize) + if n.init: + s += ' = ' + self._visit_expr(n.init) + return s + + def visit_DeclList(self, n): + s = self.visit(n.decls[0]) + if len(n.decls) > 1: + s += ', ' + ', '.join(self.visit_Decl(decl, no_type=True) + for decl in n.decls[1:]) + return s + + def visit_Typedef(self, n): + s = '' + if n.storage: s += ' '.join(n.storage) + ' ' + s += self._generate_type(n.type) + return s + + def visit_Cast(self, n): + s = '(' + self._generate_type(n.to_type, emit_declname=False) + ')' + return s + ' ' + self._parenthesize_unless_simple(n.expr) + + def visit_ExprList(self, n): + visited_subexprs = [] + for expr in n.exprs: + visited_subexprs.append(self._visit_expr(expr)) + return ', '.join(visited_subexprs) + + def visit_InitList(self, n): + visited_subexprs = [] + for expr in n.exprs: + visited_subexprs.append(self._visit_expr(expr)) + return ', '.join(visited_subexprs) + + def visit_Enum(self, n): + return self._generate_struct_union_enum(n, name='enum') + + def visit_Alignas(self, n): + return '_Alignas({})'.format(self.visit(n.alignment)) + + def visit_Enumerator(self, n): + if not n.value: + return '{indent}{name},\n'.format( + indent=self._make_indent(), + name=n.name, + ) + else: + return '{indent}{name} = {value},\n'.format( + indent=self._make_indent(), + name=n.name, + value=self.visit(n.value), + ) + + def visit_FuncDef(self, n): + decl = self.visit(n.decl) + self.indent_level = 0 + body = self.visit(n.body) + if n.param_decls: + knrdecls = ';\n'.join(self.visit(p) for p in n.param_decls) + return decl + '\n' + knrdecls + ';\n' + body + '\n' + else: + return decl + '\n' + body + '\n' + + def visit_FileAST(self, n): + s = '' + for ext in n.ext: + if isinstance(ext, c_ast.FuncDef): + s += self.visit(ext) + elif isinstance(ext, c_ast.Pragma): + s += self.visit(ext) + '\n' + else: + s += self.visit(ext) + ';\n' + return s + + def visit_Compound(self, n): + s = self._make_indent() + '{\n' + self.indent_level += 2 + if n.block_items: + s += ''.join(self._generate_stmt(stmt) for stmt in n.block_items) + self.indent_level -= 2 + s += self._make_indent() + '}\n' + return s + + def visit_CompoundLiteral(self, n): + return '(' + self.visit(n.type) + '){' + self.visit(n.init) + '}' + + + def visit_EmptyStatement(self, n): + return ';' + + def visit_ParamList(self, n): + return ', '.join(self.visit(param) for param in n.params) + + def visit_Return(self, n): + s = 'return' + if n.expr: s += ' ' + self.visit(n.expr) + return s + ';' + + def visit_Break(self, n): + return 'break;' + + def 
visit_Continue(self, n): + return 'continue;' + + def visit_TernaryOp(self, n): + s = '(' + self._visit_expr(n.cond) + ') ? ' + s += '(' + self._visit_expr(n.iftrue) + ') : ' + s += '(' + self._visit_expr(n.iffalse) + ')' + return s + + def visit_If(self, n): + s = 'if (' + if n.cond: s += self.visit(n.cond) + s += ')\n' + s += self._generate_stmt(n.iftrue, add_indent=True) + if n.iffalse: + s += self._make_indent() + 'else\n' + s += self._generate_stmt(n.iffalse, add_indent=True) + return s + + def visit_For(self, n): + s = 'for (' + if n.init: s += self.visit(n.init) + s += ';' + if n.cond: s += ' ' + self.visit(n.cond) + s += ';' + if n.next: s += ' ' + self.visit(n.next) + s += ')\n' + s += self._generate_stmt(n.stmt, add_indent=True) + return s + + def visit_While(self, n): + s = 'while (' + if n.cond: s += self.visit(n.cond) + s += ')\n' + s += self._generate_stmt(n.stmt, add_indent=True) + return s + + def visit_DoWhile(self, n): + s = 'do\n' + s += self._generate_stmt(n.stmt, add_indent=True) + s += self._make_indent() + 'while (' + if n.cond: s += self.visit(n.cond) + s += ');' + return s + + def visit_StaticAssert(self, n): + s = '_Static_assert(' + s += self.visit(n.cond) + if n.message: + s += ',' + s += self.visit(n.message) + s += ')' + return s + + def visit_Switch(self, n): + s = 'switch (' + self.visit(n.cond) + ')\n' + s += self._generate_stmt(n.stmt, add_indent=True) + return s + + def visit_Case(self, n): + s = 'case ' + self.visit(n.expr) + ':\n' + for stmt in n.stmts: + s += self._generate_stmt(stmt, add_indent=True) + return s + + def visit_Default(self, n): + s = 'default:\n' + for stmt in n.stmts: + s += self._generate_stmt(stmt, add_indent=True) + return s + + def visit_Label(self, n): + return n.name + ':\n' + self._generate_stmt(n.stmt) + + def visit_Goto(self, n): + return 'goto ' + n.name + ';' + + def visit_EllipsisParam(self, n): + return '...' + + def visit_Struct(self, n): + return self._generate_struct_union_enum(n, 'struct') + + def visit_Typename(self, n): + return self._generate_type(n.type) + + def visit_Union(self, n): + return self._generate_struct_union_enum(n, 'union') + + def visit_NamedInitializer(self, n): + s = '' + for name in n.name: + if isinstance(name, c_ast.ID): + s += '.' + name.name + else: + s += '[' + self.visit(name) + ']' + s += ' = ' + self._visit_expr(n.expr) + return s + + def visit_FuncDecl(self, n): + return self._generate_type(n) + + def visit_ArrayDecl(self, n): + return self._generate_type(n, emit_declname=False) + + def visit_TypeDecl(self, n): + return self._generate_type(n, emit_declname=False) + + def visit_PtrDecl(self, n): + return self._generate_type(n, emit_declname=False) + + def _generate_struct_union_enum(self, n, name): + """ Generates code for structs, unions, and enums. name should be + 'struct', 'union', or 'enum'. 
+ """ + if name in ('struct', 'union'): + members = n.decls + body_function = self._generate_struct_union_body + else: + assert name == 'enum' + members = None if n.values is None else n.values.enumerators + body_function = self._generate_enum_body + s = name + ' ' + (n.name or '') + if members is not None: + # None means no members + # Empty sequence means an empty list of members + s += '\n' + s += self._make_indent() + self.indent_level += 2 + s += '{\n' + s += body_function(members) + self.indent_level -= 2 + s += self._make_indent() + '}' + return s + + def _generate_struct_union_body(self, members): + return ''.join(self._generate_stmt(decl) for decl in members) + + def _generate_enum_body(self, members): + # `[:-2] + '\n'` removes the final `,` from the enumerator list + return ''.join(self.visit(value) for value in members)[:-2] + '\n' + + def _generate_stmt(self, n, add_indent=False): + """ Generation from a statement node. This method exists as a wrapper + for individual visit_* methods to handle different treatment of + some statements in this context. + """ + typ = type(n) + if add_indent: self.indent_level += 2 + indent = self._make_indent() + if add_indent: self.indent_level -= 2 + + if typ in ( + c_ast.Decl, c_ast.Assignment, c_ast.Cast, c_ast.UnaryOp, + c_ast.BinaryOp, c_ast.TernaryOp, c_ast.FuncCall, c_ast.ArrayRef, + c_ast.StructRef, c_ast.Constant, c_ast.ID, c_ast.Typedef, + c_ast.ExprList): + # These can also appear in an expression context so no semicolon + # is added to them automatically + # + return indent + self.visit(n) + ';\n' + elif typ in (c_ast.Compound,): + # No extra indentation required before the opening brace of a + # compound - because it consists of multiple lines it has to + # compute its own indentation. + # + return self.visit(n) + elif typ in (c_ast.If,): + return indent + self.visit(n) + else: + return indent + self.visit(n) + '\n' + + def _generate_decl(self, n): + """ Generation from a Decl node. + """ + s = '' + if n.funcspec: s = ' '.join(n.funcspec) + ' ' + if n.storage: s += ' '.join(n.storage) + ' ' + if n.align: s += self.visit(n.align[0]) + ' ' + s += self._generate_type(n.type) + return s + + def _generate_type(self, n, modifiers=[], emit_declname = True): + """ Recursive generation from a type node. n is the type node. + modifiers collects the PtrDecl, ArrayDecl and FuncDecl modifiers + encountered on the way down to a TypeDecl, to allow proper + generation from it. + """ + typ = type(n) + #~ print(n, modifiers) + + if typ == c_ast.TypeDecl: + s = '' + if n.quals: s += ' '.join(n.quals) + ' ' + s += self.visit(n.type) + + nstr = n.declname if n.declname and emit_declname else '' + # Resolve modifiers. + # Wrap in parens to distinguish pointer to array and pointer to + # function syntax. 
+ # + for i, modifier in enumerate(modifiers): + if isinstance(modifier, c_ast.ArrayDecl): + if (i != 0 and + isinstance(modifiers[i - 1], c_ast.PtrDecl)): + nstr = '(' + nstr + ')' + nstr += '[' + if modifier.dim_quals: + nstr += ' '.join(modifier.dim_quals) + ' ' + nstr += self.visit(modifier.dim) + ']' + elif isinstance(modifier, c_ast.FuncDecl): + if (i != 0 and + isinstance(modifiers[i - 1], c_ast.PtrDecl)): + nstr = '(' + nstr + ')' + nstr += '(' + self.visit(modifier.args) + ')' + elif isinstance(modifier, c_ast.PtrDecl): + if modifier.quals: + nstr = '* %s%s' % (' '.join(modifier.quals), + ' ' + nstr if nstr else '') + else: + nstr = '*' + nstr + if nstr: s += ' ' + nstr + return s + elif typ == c_ast.Decl: + return self._generate_decl(n.type) + elif typ == c_ast.Typename: + return self._generate_type(n.type, emit_declname = emit_declname) + elif typ == c_ast.IdentifierType: + return ' '.join(n.names) + ' ' + elif typ in (c_ast.ArrayDecl, c_ast.PtrDecl, c_ast.FuncDecl): + return self._generate_type(n.type, modifiers + [n], + emit_declname = emit_declname) + else: + return self.visit(n) + + def _parenthesize_if(self, n, condition): + """ Visits 'n' and returns its string representation, parenthesized + if the condition function applied to the node returns True. + """ + s = self._visit_expr(n) + if condition(n): + return '(' + s + ')' + else: + return s + + def _parenthesize_unless_simple(self, n): + """ Common use case for _parenthesize_if + """ + return self._parenthesize_if(n, lambda d: not self._is_simple_node(d)) + + def _is_simple_node(self, n): + """ Returns True for nodes that are "simple" - i.e. nodes that always + have higher precedence than operators. + """ + return isinstance(n, (c_ast.Constant, c_ast.ID, c_ast.ArrayRef, + c_ast.StructRef, c_ast.FuncCall)) diff --git a/venv/lib/python3.12/site-packages/pycparser/c_lexer.py b/venv/lib/python3.12/site-packages/pycparser/c_lexer.py new file mode 100644 index 00000000..135826d1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pycparser/c_lexer.py @@ -0,0 +1,569 @@ +#------------------------------------------------------------------------------ +# pycparser: c_lexer.py +# +# CLexer class: lexer for the C language +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#------------------------------------------------------------------------------ +import re + +from .ply import lex +from .ply.lex import TOKEN + + +class CLexer(object): + """ A lexer for the C language. After building it, set the + input text with input(), and call token() to get new + tokens. + + The public attribute filename can be set to an initial + filename, but the lexer will update it upon #line + directives. + """ + def __init__(self, error_func, on_lbrace_func, on_rbrace_func, + type_lookup_func): + """ Create a new Lexer. + + error_func: + An error function. Will be called with an error + message, line and column as arguments, in case of + an error during lexing. + + on_lbrace_func, on_rbrace_func: + Called when an LBRACE or RBRACE is encountered + (likely to push/pop type_lookup_func's scope) + + type_lookup_func: + A type lookup function. Given a string, it must + return True IFF this string is a name of a type + that was defined with a typedef earlier. 
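+ (For example, once 'typedef int T;' has been + parsed, type_lookup_func('T') must return True, + while type_lookup_func('x') returns False.)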
+ """ + self.error_func = error_func + self.on_lbrace_func = on_lbrace_func + self.on_rbrace_func = on_rbrace_func + self.type_lookup_func = type_lookup_func + self.filename = '' + + # Keeps track of the last token returned from self.token() + self.last_token = None + + # Allow either "# line" or "# " to support GCC's + # cpp output + # + self.line_pattern = re.compile(r'([ \t]*line\W)|([ \t]*\d+)') + self.pragma_pattern = re.compile(r'[ \t]*pragma\W') + + def build(self, **kwargs): + """ Builds the lexer from the specification. Must be + called after the lexer object is created. + + This method exists separately, because the PLY + manual warns against calling lex.lex inside + __init__ + """ + self.lexer = lex.lex(object=self, **kwargs) + + def reset_lineno(self): + """ Resets the internal line number counter of the lexer. + """ + self.lexer.lineno = 1 + + def input(self, text): + self.lexer.input(text) + + def token(self): + self.last_token = self.lexer.token() + return self.last_token + + def find_tok_column(self, token): + """ Find the column of the token in its line. + """ + last_cr = self.lexer.lexdata.rfind('\n', 0, token.lexpos) + return token.lexpos - last_cr + + ######################-- PRIVATE --###################### + + ## + ## Internal auxiliary methods + ## + def _error(self, msg, token): + location = self._make_tok_location(token) + self.error_func(msg, location[0], location[1]) + self.lexer.skip(1) + + def _make_tok_location(self, token): + return (token.lineno, self.find_tok_column(token)) + + ## + ## Reserved keywords + ## + keywords = ( + 'AUTO', 'BREAK', 'CASE', 'CHAR', 'CONST', + 'CONTINUE', 'DEFAULT', 'DO', 'DOUBLE', 'ELSE', 'ENUM', 'EXTERN', + 'FLOAT', 'FOR', 'GOTO', 'IF', 'INLINE', 'INT', 'LONG', + 'REGISTER', 'OFFSETOF', + 'RESTRICT', 'RETURN', 'SHORT', 'SIGNED', 'SIZEOF', 'STATIC', 'STRUCT', + 'SWITCH', 'TYPEDEF', 'UNION', 'UNSIGNED', 'VOID', + 'VOLATILE', 'WHILE', '__INT128', + ) + + keywords_new = ( + '_BOOL', '_COMPLEX', + '_NORETURN', '_THREAD_LOCAL', '_STATIC_ASSERT', + '_ATOMIC', '_ALIGNOF', '_ALIGNAS', + '_PRAGMA', + ) + + keyword_map = {} + + for keyword in keywords: + keyword_map[keyword.lower()] = keyword + + for keyword in keywords_new: + keyword_map[keyword[:2].upper() + keyword[2:].lower()] = keyword + + ## + ## All the tokens recognized by the lexer + ## + tokens = keywords + keywords_new + ( + # Identifiers + 'ID', + + # Type identifiers (identifiers previously defined as + # types with typedef) + 'TYPEID', + + # constants + 'INT_CONST_DEC', 'INT_CONST_OCT', 'INT_CONST_HEX', 'INT_CONST_BIN', 'INT_CONST_CHAR', + 'FLOAT_CONST', 'HEX_FLOAT_CONST', + 'CHAR_CONST', + 'WCHAR_CONST', + 'U8CHAR_CONST', + 'U16CHAR_CONST', + 'U32CHAR_CONST', + + # String literals + 'STRING_LITERAL', + 'WSTRING_LITERAL', + 'U8STRING_LITERAL', + 'U16STRING_LITERAL', + 'U32STRING_LITERAL', + + # Operators + 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MOD', + 'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT', + 'LOR', 'LAND', 'LNOT', + 'LT', 'LE', 'GT', 'GE', 'EQ', 'NE', + + # Assignment + 'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', + 'PLUSEQUAL', 'MINUSEQUAL', + 'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', + 'OREQUAL', + + # Increment/decrement + 'PLUSPLUS', 'MINUSMINUS', + + # Structure dereference (->) + 'ARROW', + + # Conditional operator (?) + 'CONDOP', + + # Delimiters + 'LPAREN', 'RPAREN', # ( ) + 'LBRACKET', 'RBRACKET', # [ ] + 'LBRACE', 'RBRACE', # { } + 'COMMA', 'PERIOD', # . , + 'SEMI', 'COLON', # ; : + + # Ellipsis (...) 
+ 'ELLIPSIS', + + # pre-processor + 'PPHASH', # '#' + 'PPPRAGMA', # 'pragma' + 'PPPRAGMASTR', + ) + + ## + ## Regexes for use in tokens + ## + ## + + # valid C identifiers (K&R2: A.2.3), plus '$' (supported by some compilers) + identifier = r'[a-zA-Z_$][0-9a-zA-Z_$]*' + + hex_prefix = '0[xX]' + hex_digits = '[0-9a-fA-F]+' + bin_prefix = '0[bB]' + bin_digits = '[01]+' + + # integer constants (K&R2: A.2.5.1) + integer_suffix_opt = r'(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?' + decimal_constant = '(0'+integer_suffix_opt+')|([1-9][0-9]*'+integer_suffix_opt+')' + octal_constant = '0[0-7]*'+integer_suffix_opt + hex_constant = hex_prefix+hex_digits+integer_suffix_opt + bin_constant = bin_prefix+bin_digits+integer_suffix_opt + + bad_octal_constant = '0[0-7]*[89]' + + # comments are not supported + unsupported_c_style_comment = r'\/\*' + unsupported_cxx_style_comment = r'\/\/' + + # character constants (K&R2: A.2.5.2) + # Note: a-zA-Z and '.-~^_!=&;,' are allowed as escape chars to support #line + # directives with Windows paths as filenames (..\..\dir\file) + # For the same reason, decimal_escape allows all digit sequences. We want to + # parse all correct code, even if it means to sometimes parse incorrect + # code. + # + # The original regexes were taken verbatim from the C syntax definition, + # and were later modified to avoid worst-case exponential running time. + # + # simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])""" + # decimal_escape = r"""(\d+)""" + # hex_escape = r"""(x[0-9a-fA-F]+)""" + # bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-7])""" + # + # The following modifications were made to avoid the ambiguity that allowed backtracking: + # (https://github.com/eliben/pycparser/issues/61) + # + # - \x was removed from simple_escape, unless it was not followed by a hex digit, to avoid ambiguity with hex_escape. + # - hex_escape allows one or more hex characters, but requires that the next character(if any) is not hex + # - decimal_escape allows one or more decimal characters, but requires that the next character(if any) is not a decimal + # - bad_escape does not allow any decimals (8-9), to avoid conflicting with the permissive decimal_escape. + # + # Without this change, python's `re` module would recursively try parsing each ambiguous escape sequence in multiple ways. + # e.g. `\123` could be parsed as `\1`+`23`, `\12`+`3`, and `\123`. 
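+ # With the (?!...) lookaheads below there is only one way to match: + # '\123' is consumed as one decimal escape and '\x4F' as one hex + # escape, with no backtracking.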
+ + simple_escape = r"""([a-wyzA-Z._~!=&\^\-\\?'"]|x(?![0-9a-fA-F]))""" + decimal_escape = r"""(\d+)(?!\d)""" + hex_escape = r"""(x[0-9a-fA-F]+)(?![0-9a-fA-F])""" + bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-9])""" + + escape_sequence = r"""(\\("""+simple_escape+'|'+decimal_escape+'|'+hex_escape+'))' + + # This complicated regex with lookahead might be slow for strings, so because all of the valid escapes (including \x) allowed + # 0 or more non-escaped characters after the first character, simple_escape+decimal_escape+hex_escape got simplified to + + escape_sequence_start_in_string = r"""(\\[0-9a-zA-Z._~!=&\^\-\\?'"])""" + + cconst_char = r"""([^'\\\n]|"""+escape_sequence+')' + char_const = "'"+cconst_char+"'" + wchar_const = 'L'+char_const + u8char_const = 'u8'+char_const + u16char_const = 'u'+char_const + u32char_const = 'U'+char_const + multicharacter_constant = "'"+cconst_char+"{2,4}'" + unmatched_quote = "('"+cconst_char+"*\\n)|('"+cconst_char+"*$)" + bad_char_const = r"""('"""+cconst_char+"""[^'\n]+')|('')|('"""+bad_escape+r"""[^'\n]*')""" + + # string literals (K&R2: A.2.6) + string_char = r"""([^"\\\n]|"""+escape_sequence_start_in_string+')' + string_literal = '"'+string_char+'*"' + wstring_literal = 'L'+string_literal + u8string_literal = 'u8'+string_literal + u16string_literal = 'u'+string_literal + u32string_literal = 'U'+string_literal + bad_string_literal = '"'+string_char+'*'+bad_escape+string_char+'*"' + + # floating constants (K&R2: A.2.5.3) + exponent_part = r"""([eE][-+]?[0-9]+)""" + fractional_constant = r"""([0-9]*\.[0-9]+)|([0-9]+\.)""" + floating_constant = '(((('+fractional_constant+')'+exponent_part+'?)|([0-9]+'+exponent_part+'))[FfLl]?)' + binary_exponent_part = r'''([pP][+-]?[0-9]+)''' + hex_fractional_constant = '((('+hex_digits+r""")?\."""+hex_digits+')|('+hex_digits+r"""\.))""" + hex_floating_constant = '('+hex_prefix+'('+hex_digits+'|'+hex_fractional_constant+')'+binary_exponent_part+'[FfLl]?)' + + ## + ## Lexer states: used for preprocessor \n-terminated directives + ## + states = ( + # ppline: preprocessor line directives + # + ('ppline', 'exclusive'), + + # pppragma: pragma + # + ('pppragma', 'exclusive'), + ) + + def t_PPHASH(self, t): + r'[ \t]*\#' + if self.line_pattern.match(t.lexer.lexdata, pos=t.lexer.lexpos): + t.lexer.begin('ppline') + self.pp_line = self.pp_filename = None + elif self.pragma_pattern.match(t.lexer.lexdata, pos=t.lexer.lexpos): + t.lexer.begin('pppragma') + else: + t.type = 'PPHASH' + return t + + ## + ## Rules for the ppline state + ## + @TOKEN(string_literal) + def t_ppline_FILENAME(self, t): + if self.pp_line is None: + self._error('filename before line number in #line', t) + else: + self.pp_filename = t.value.lstrip('"').rstrip('"') + + @TOKEN(decimal_constant) + def t_ppline_LINE_NUMBER(self, t): + if self.pp_line is None: + self.pp_line = t.value + else: + # Ignore: GCC's cpp sometimes inserts a numeric flag + # after the file name + pass + + def t_ppline_NEWLINE(self, t): + r'\n' + if self.pp_line is None: + self._error('line number missing in #line', t) + else: + self.lexer.lineno = int(self.pp_line) + + if self.pp_filename is not None: + self.filename = self.pp_filename + + t.lexer.begin('INITIAL') + + def t_ppline_PPLINE(self, t): + r'line' + pass + + t_ppline_ignore = ' \t' + + def t_ppline_error(self, t): + self._error('invalid #line directive', t) + + ## + ## Rules for the pppragma state + ## + def t_pppragma_NEWLINE(self, t): + r'\n' + t.lexer.lineno += 1 + t.lexer.begin('INITIAL') + + def 
t_pppragma_PPPRAGMA(self, t): + r'pragma' + return t + + t_pppragma_ignore = ' \t' + + def t_pppragma_STR(self, t): + '.+' + t.type = 'PPPRAGMASTR' + return t + + def t_pppragma_error(self, t): + self._error('invalid #pragma directive', t) + + ## + ## Rules for the normal state + ## + t_ignore = ' \t' + + # Newlines + def t_NEWLINE(self, t): + r'\n+' + t.lexer.lineno += t.value.count("\n") + + # Operators + t_PLUS = r'\+' + t_MINUS = r'-' + t_TIMES = r'\*' + t_DIVIDE = r'/' + t_MOD = r'%' + t_OR = r'\|' + t_AND = r'&' + t_NOT = r'~' + t_XOR = r'\^' + t_LSHIFT = r'<<' + t_RSHIFT = r'>>' + t_LOR = r'\|\|' + t_LAND = r'&&' + t_LNOT = r'!' + t_LT = r'<' + t_GT = r'>' + t_LE = r'<=' + t_GE = r'>=' + t_EQ = r'==' + t_NE = r'!=' + + # Assignment operators + t_EQUALS = r'=' + t_TIMESEQUAL = r'\*=' + t_DIVEQUAL = r'/=' + t_MODEQUAL = r'%=' + t_PLUSEQUAL = r'\+=' + t_MINUSEQUAL = r'-=' + t_LSHIFTEQUAL = r'<<=' + t_RSHIFTEQUAL = r'>>=' + t_ANDEQUAL = r'&=' + t_OREQUAL = r'\|=' + t_XOREQUAL = r'\^=' + + # Increment/decrement + t_PLUSPLUS = r'\+\+' + t_MINUSMINUS = r'--' + + # -> + t_ARROW = r'->' + + # ? + t_CONDOP = r'\?' + + # Delimiters + t_LPAREN = r'\(' + t_RPAREN = r'\)' + t_LBRACKET = r'\[' + t_RBRACKET = r'\]' + t_COMMA = r',' + t_PERIOD = r'\.' + t_SEMI = r';' + t_COLON = r':' + t_ELLIPSIS = r'\.\.\.' + + # Scope delimiters + # To see why on_lbrace_func is needed, consider: + # typedef char TT; + # void foo(int TT) { TT = 10; } + # TT x = 5; + # Outside the function, TT is a typedef, but inside (starting and ending + # with the braces) it's a parameter. The trouble begins with yacc's + # lookahead token. If we open a new scope in brace_open, then TT has + # already been read and incorrectly interpreted as TYPEID. So, we need + # to open and close scopes from within the lexer. + # Similar for the TT immediately outside the end of the function. + # + @TOKEN(r'\{') + def t_LBRACE(self, t): + self.on_lbrace_func() + return t + @TOKEN(r'\}') + def t_RBRACE(self, t): + self.on_rbrace_func() + return t + + t_STRING_LITERAL = string_literal + + # The following floating and integer constants are defined as + # functions to impose a strict order (otherwise, decimal + # is placed before the others because its regex is longer, + # and this is bad) + # + @TOKEN(floating_constant) + def t_FLOAT_CONST(self, t): + return t + + @TOKEN(hex_floating_constant) + def t_HEX_FLOAT_CONST(self, t): + return t + + @TOKEN(hex_constant) + def t_INT_CONST_HEX(self, t): + return t + + @TOKEN(bin_constant) + def t_INT_CONST_BIN(self, t): + return t + + @TOKEN(bad_octal_constant) + def t_BAD_CONST_OCT(self, t): + msg = "Invalid octal constant" + self._error(msg, t) + + @TOKEN(unsupported_c_style_comment) + def t_UNSUPPORTED_C_STYLE_COMMENT(self, t): + msg = "Comments are not supported, see https://github.com/eliben/pycparser#3using." + self._error(msg, t) + + @TOKEN(unsupported_cxx_style_comment) + def t_UNSUPPORTED_CXX_STYLE_COMMENT(self, t): + msg = "Comments are not supported, see https://github.com/eliben/pycparser#3using." 
+ self._error(msg, t) + + @TOKEN(octal_constant) + def t_INT_CONST_OCT(self, t): + return t + + @TOKEN(decimal_constant) + def t_INT_CONST_DEC(self, t): + return t + + # Must come before bad_char_const, to prevent it from + # catching valid char constants as invalid + # + @TOKEN(multicharacter_constant) + def t_INT_CONST_CHAR(self, t): + return t + + @TOKEN(char_const) + def t_CHAR_CONST(self, t): + return t + + @TOKEN(wchar_const) + def t_WCHAR_CONST(self, t): + return t + + @TOKEN(u8char_const) + def t_U8CHAR_CONST(self, t): + return t + + @TOKEN(u16char_const) + def t_U16CHAR_CONST(self, t): + return t + + @TOKEN(u32char_const) + def t_U32CHAR_CONST(self, t): + return t + + @TOKEN(unmatched_quote) + def t_UNMATCHED_QUOTE(self, t): + msg = "Unmatched '" + self._error(msg, t) + + @TOKEN(bad_char_const) + def t_BAD_CHAR_CONST(self, t): + msg = "Invalid char constant %s" % t.value + self._error(msg, t) + + @TOKEN(wstring_literal) + def t_WSTRING_LITERAL(self, t): + return t + + @TOKEN(u8string_literal) + def t_U8STRING_LITERAL(self, t): + return t + + @TOKEN(u16string_literal) + def t_U16STRING_LITERAL(self, t): + return t + + @TOKEN(u32string_literal) + def t_U32STRING_LITERAL(self, t): + return t + + # unmatched string literals are caught by the preprocessor + + @TOKEN(bad_string_literal) + def t_BAD_STRING_LITERAL(self, t): + msg = "String contains invalid escape code" + self._error(msg, t) + + @TOKEN(identifier) + def t_ID(self, t): + t.type = self.keyword_map.get(t.value, "ID") + if t.type == 'ID' and self.type_lookup_func(t.value): + t.type = "TYPEID" + return t + + def t_error(self, t): + msg = 'Illegal character %s' % repr(t.value[0]) + self._error(msg, t) diff --git a/venv/lib/python3.12/site-packages/pycparser/c_parser.py b/venv/lib/python3.12/site-packages/pycparser/c_parser.py new file mode 100644 index 00000000..bb123ac7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pycparser/c_parser.py @@ -0,0 +1,1973 @@ +#------------------------------------------------------------------------------ +# pycparser: c_parser.py +# +# CParser class: Parser and AST builder for the C language +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#------------------------------------------------------------------------------ +from .ply import yacc + +from . import c_ast +from .c_lexer import CLexer +from .plyparser import PLYParser, ParseError, parameterized, template +from .ast_transforms import fix_switch_cases, fix_atomic_specifiers + + +@template +class CParser(PLYParser): + def __init__( + self, + lex_optimize=True, + lexer=CLexer, + lextab='pycparser.lextab', + yacc_optimize=True, + yacctab='pycparser.yacctab', + yacc_debug=False, + taboutputdir=''): + """ Create a new CParser. + + Some arguments for controlling the debug/optimization + level of the parser are provided. The defaults are + tuned for release/performance mode. + The simple rules for using them are: + *) When tweaking CParser/CLexer, set these to False + *) When releasing a stable parser, set to True + + lex_optimize: + Set to False when you're modifying the lexer. + Otherwise, changes in the lexer won't be used, if + some lextab.py file exists. + When releasing with a stable lexer, set to True + to save the re-generation of the lexer table on + each run. + + lexer: + Set this parameter to define the lexer to use if + you're not using the default CLexer. + + lextab: + Points to the lex table that's used for optimized + mode. 
Only if you're modifying the lexer and want + some tests to avoid re-generating the table, make + this point to a local lex table file (that's been + earlier generated with lex_optimize=True) + + yacc_optimize: + Set to False when you're modifying the parser. + Otherwise, changes in the parser won't be used, if + some parsetab.py file exists. + When releasing with a stable parser, set to True + to save the re-generation of the parser table on + each run. + + yacctab: + Points to the yacc table that's used for optimized + mode. Only if you're modifying the parser, make + this point to a local yacc table file + + yacc_debug: + Generate a parser.out file that explains how yacc + built the parsing table from the grammar. + + taboutputdir: + Set this parameter to control the location of generated + lextab and yacctab files. + """ + self.clex = lexer( + error_func=self._lex_error_func, + on_lbrace_func=self._lex_on_lbrace_func, + on_rbrace_func=self._lex_on_rbrace_func, + type_lookup_func=self._lex_type_lookup_func) + + self.clex.build( + optimize=lex_optimize, + lextab=lextab, + outputdir=taboutputdir) + self.tokens = self.clex.tokens + + rules_with_opt = [ + 'abstract_declarator', + 'assignment_expression', + 'declaration_list', + 'declaration_specifiers_no_type', + 'designation', + 'expression', + 'identifier_list', + 'init_declarator_list', + 'id_init_declarator_list', + 'initializer_list', + 'parameter_type_list', + 'block_item_list', + 'type_qualifier_list', + 'struct_declarator_list' + ] + + for rule in rules_with_opt: + self._create_opt_rule(rule) + + self.cparser = yacc.yacc( + module=self, + start='translation_unit_or_empty', + debug=yacc_debug, + optimize=yacc_optimize, + tabmodule=yacctab, + outputdir=taboutputdir) + + # Stack of scopes for keeping track of symbols. _scope_stack[-1] is + # the current (topmost) scope. Each scope is a dictionary that + # specifies whether a name is a type. If _scope_stack[n][name] is + # True, 'name' is currently a type in the scope. If it's False, + # 'name' is used in the scope but not as a type (for instance, if we + # saw: int name; + # If 'name' is not a key in _scope_stack[n] then 'name' was not defined + # in this scope at all. + self._scope_stack = [dict()] + + # Keeps track of the last token given to yacc (the lookahead token) + self._last_yielded_token = None + + def parse(self, text, filename='', debug=False): + """ Parses C code and returns an AST. 
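+ + A minimal usage sketch (the file name is illustrative): + + parser = CParser() + ast = parser.parse('int x = 1;', filename='example.c')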
+ + text: + A string containing the C source code + + filename: + Name of the file being parsed (for meaningful + error messages) + + debug: + Debug flag to YACC + """ + self.clex.filename = filename + self.clex.reset_lineno() + self._scope_stack = [dict()] + self._last_yielded_token = None + return self.cparser.parse( + input=text, + lexer=self.clex, + debug=debug) + + ######################-- PRIVATE --###################### + + def _push_scope(self): + self._scope_stack.append(dict()) + + def _pop_scope(self): + assert len(self._scope_stack) > 1 + self._scope_stack.pop() + + def _add_typedef_name(self, name, coord): + """ Add a new typedef name (ie a TYPEID) to the current scope + """ + if not self._scope_stack[-1].get(name, True): + self._parse_error( + "Typedef %r previously declared as non-typedef " + "in this scope" % name, coord) + self._scope_stack[-1][name] = True + + def _add_identifier(self, name, coord): + """ Add a new object, function, or enum member name (ie an ID) to the + current scope + """ + if self._scope_stack[-1].get(name, False): + self._parse_error( + "Non-typedef %r previously declared as typedef " + "in this scope" % name, coord) + self._scope_stack[-1][name] = False + + def _is_type_in_scope(self, name): + """ Is *name* a typedef-name in the current scope? + """ + for scope in reversed(self._scope_stack): + # If name is an identifier in this scope it shadows typedefs in + # higher scopes. + in_scope = scope.get(name) + if in_scope is not None: return in_scope + return False + + def _lex_error_func(self, msg, line, column): + self._parse_error(msg, self._coord(line, column)) + + def _lex_on_lbrace_func(self): + self._push_scope() + + def _lex_on_rbrace_func(self): + self._pop_scope() + + def _lex_type_lookup_func(self, name): + """ Looks up types that were previously defined with + typedef. + Passed to the lexer for recognizing identifiers that + are types. + """ + is_type = self._is_type_in_scope(name) + return is_type + + def _get_yacc_lookahead_token(self): + """ We need access to yacc's lookahead token in certain cases. + This is the last token yacc requested from the lexer, so we + ask the lexer. + """ + return self.clex.last_token + + # To understand what's going on here, read sections A.8.5 and + # A.8.6 of K&R2 very carefully. + # + # A C type consists of a basic type declaration, with a list + # of modifiers. For example: + # + # int *c[5]; + # + # The basic declaration here is 'int c', and the pointer and + # the array are the modifiers. + # + # Basic declarations are represented by TypeDecl (from module c_ast) and the + # modifiers are FuncDecl, PtrDecl and ArrayDecl. + # + # The standard states that whenever a new modifier is parsed, it should be + # added to the end of the list of modifiers. For example: + # + # K&R2 A.8.6.2: Array Declarators + # + # In a declaration T D where D has the form + # D1 [constant-expression-opt] + # and the type of the identifier in the declaration T D1 is + # "type-modifier T", the type of the + # identifier of D is "type-modifier array of T" + # + # This is what this method does. The declarator it receives + # can be a list of declarators ending with TypeDecl. It + # tacks the modifier to the end of this list, just before + # the TypeDecl. + # + # Additionally, the modifier may be a list itself. This is + # useful for pointers, that can come as a chain from the rule + # p_pointer. In this case, the whole modifier list is spliced + # into the new location. 
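+ # For example, for 'int **x[5];' the declarator arrives as + # ArrayDecl -> TypeDecl and the pointer modifier as a two-element + # PtrDecl chain; splicing produces + # ArrayDecl -> PtrDecl -> PtrDecl -> TypeDecl, + # i.e. an array of five pointers to pointer to int.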
+ def _type_modify_decl(self, decl, modifier): + """ Tacks a type modifier on a declarator, and returns + the modified declarator. + + Note: the declarator and modifier may be modified + """ + #~ print '****' + #~ decl.show(offset=3) + #~ modifier.show(offset=3) + #~ print '****' + + modifier_head = modifier + modifier_tail = modifier + + # The modifier may be a nested list. Reach its tail. + while modifier_tail.type: + modifier_tail = modifier_tail.type + + # If the decl is a basic type, just tack the modifier onto it. + if isinstance(decl, c_ast.TypeDecl): + modifier_tail.type = decl + return modifier + else: + # Otherwise, the decl is a list of modifiers. Reach + # its tail and splice the modifier onto the tail, + # pointing to the underlying basic type. + decl_tail = decl + + while not isinstance(decl_tail.type, c_ast.TypeDecl): + decl_tail = decl_tail.type + + modifier_tail.type = decl_tail.type + decl_tail.type = modifier_head + return decl + + # Due to the order in which declarators are constructed, + # they have to be fixed in order to look like a normal AST. + # + # When a declaration arrives from syntax construction, it has + # these problems: + # * The innermost TypeDecl has no type (because the basic + # type is only known at the uppermost declaration level) + # * The declaration has no variable name, since that is saved + # in the innermost TypeDecl + # * The typename of the declaration is a list of type + # specifiers, and not a node. Here, basic identifier types + # should be separated from more complex types like enums + # and structs. + # + # This method fixes these problems. + def _fix_decl_name_type(self, decl, typename): + """ Fixes a declaration. Modifies decl. + """ + # Reach the underlying basic type + # + type = decl + while not isinstance(type, c_ast.TypeDecl): + type = type.type + + decl.name = type.declname + type.quals = decl.quals[:] + + # The typename is a list of types. If any type in this + # list isn't an IdentifierType, it must be the only + # type in the list (it's illegal to declare "int enum ..") + # If all the types are basic, they're collected in the + # IdentifierType holder. + for tn in typename: + if not isinstance(tn, c_ast.IdentifierType): + if len(typename) > 1: + self._parse_error( + "Invalid multiple types specified", tn.coord) + else: + type.type = tn + return decl + + if not typename: + # Functions default to returning int + # + if not isinstance(decl.type, c_ast.FuncDecl): + self._parse_error( + "Missing type in declaration", decl.coord) + type.type = c_ast.IdentifierType( + ['int'], + coord=decl.coord) + else: + # At this point, we know that typename is a list of IdentifierType + # nodes. Concatenate all the names into a single list. + # + type.type = c_ast.IdentifierType( + [name for id in typename for name in id.names], + coord=typename[0].coord) + return decl + + def _add_declaration_specifier(self, declspec, newspec, kind, append=False): + """ Declaration specifiers are represented by a dictionary + with the entries: + * qual: a list of type qualifiers + * storage: a list of storage type qualifiers + * type: a list of type specifiers + * function: a list of function specifiers + * alignment: a list of alignment specifiers + + This method is given a declaration specifier, and a + new specifier of a given kind. + If `append` is True, the new specifier is added to the end of + the specifiers list, otherwise it's added at the beginning. + Returns the declaration specifier, with the new + specifier incorporated. 
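+ (For example, adding 'const' with kind='qual' to an empty + declspec yields qual=['const'], with the other lists empty.)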
+ """ + spec = declspec or dict(qual=[], storage=[], type=[], function=[], alignment=[]) + + if append: + spec[kind].append(newspec) + else: + spec[kind].insert(0, newspec) + + return spec + + def _build_declarations(self, spec, decls, typedef_namespace=False): + """ Builds a list of declarations all sharing the given specifiers. + If typedef_namespace is true, each declared name is added + to the "typedef namespace", which also includes objects, + functions, and enum constants. + """ + is_typedef = 'typedef' in spec['storage'] + declarations = [] + + # Bit-fields are allowed to be unnamed. + if decls[0].get('bitsize') is not None: + pass + + # When redeclaring typedef names as identifiers in inner scopes, a + # problem can occur where the identifier gets grouped into + # spec['type'], leaving decl as None. This can only occur for the + # first declarator. + elif decls[0]['decl'] is None: + if len(spec['type']) < 2 or len(spec['type'][-1].names) != 1 or \ + not self._is_type_in_scope(spec['type'][-1].names[0]): + coord = '?' + for t in spec['type']: + if hasattr(t, 'coord'): + coord = t.coord + break + self._parse_error('Invalid declaration', coord) + + # Make this look as if it came from "direct_declarator:ID" + decls[0]['decl'] = c_ast.TypeDecl( + declname=spec['type'][-1].names[0], + type=None, + quals=None, + align=spec['alignment'], + coord=spec['type'][-1].coord) + # Remove the "new" type's name from the end of spec['type'] + del spec['type'][-1] + + # A similar problem can occur where the declaration ends up looking + # like an abstract declarator. Give it a name if this is the case. + elif not isinstance(decls[0]['decl'], ( + c_ast.Enum, c_ast.Struct, c_ast.Union, c_ast.IdentifierType)): + decls_0_tail = decls[0]['decl'] + while not isinstance(decls_0_tail, c_ast.TypeDecl): + decls_0_tail = decls_0_tail.type + if decls_0_tail.declname is None: + decls_0_tail.declname = spec['type'][-1].names[0] + del spec['type'][-1] + + for decl in decls: + assert decl['decl'] is not None + if is_typedef: + declaration = c_ast.Typedef( + name=None, + quals=spec['qual'], + storage=spec['storage'], + type=decl['decl'], + coord=decl['decl'].coord) + else: + declaration = c_ast.Decl( + name=None, + quals=spec['qual'], + align=spec['alignment'], + storage=spec['storage'], + funcspec=spec['function'], + type=decl['decl'], + init=decl.get('init'), + bitsize=decl.get('bitsize'), + coord=decl['decl'].coord) + + if isinstance(declaration.type, ( + c_ast.Enum, c_ast.Struct, c_ast.Union, + c_ast.IdentifierType)): + fixed_decl = declaration + else: + fixed_decl = self._fix_decl_name_type(declaration, spec['type']) + + # Add the type name defined by typedef to a + # symbol table (for usage in the lexer) + if typedef_namespace: + if is_typedef: + self._add_typedef_name(fixed_decl.name, fixed_decl.coord) + else: + self._add_identifier(fixed_decl.name, fixed_decl.coord) + + fixed_decl = fix_atomic_specifiers(fixed_decl) + declarations.append(fixed_decl) + + return declarations + + def _build_function_definition(self, spec, decl, param_decls, body): + """ Builds a function definition. 
+ """ + if 'typedef' in spec['storage']: + self._parse_error("Invalid typedef", decl.coord) + + declaration = self._build_declarations( + spec=spec, + decls=[dict(decl=decl, init=None)], + typedef_namespace=True)[0] + + return c_ast.FuncDef( + decl=declaration, + param_decls=param_decls, + body=body, + coord=decl.coord) + + def _select_struct_union_class(self, token): + """ Given a token (either STRUCT or UNION), selects the + appropriate AST class. + """ + if token == 'struct': + return c_ast.Struct + else: + return c_ast.Union + + ## + ## Precedence and associativity of operators + ## + # If this changes, c_generator.CGenerator.precedence_map needs to change as + # well + precedence = ( + ('left', 'LOR'), + ('left', 'LAND'), + ('left', 'OR'), + ('left', 'XOR'), + ('left', 'AND'), + ('left', 'EQ', 'NE'), + ('left', 'GT', 'GE', 'LT', 'LE'), + ('left', 'RSHIFT', 'LSHIFT'), + ('left', 'PLUS', 'MINUS'), + ('left', 'TIMES', 'DIVIDE', 'MOD') + ) + + ## + ## Grammar productions + ## Implementation of the BNF defined in K&R2 A.13 + ## + + # Wrapper around a translation unit, to allow for empty input. + # Not strictly part of the C99 Grammar, but useful in practice. + def p_translation_unit_or_empty(self, p): + """ translation_unit_or_empty : translation_unit + | empty + """ + if p[1] is None: + p[0] = c_ast.FileAST([]) + else: + p[0] = c_ast.FileAST(p[1]) + + def p_translation_unit_1(self, p): + """ translation_unit : external_declaration + """ + # Note: external_declaration is already a list + p[0] = p[1] + + def p_translation_unit_2(self, p): + """ translation_unit : translation_unit external_declaration + """ + p[1].extend(p[2]) + p[0] = p[1] + + # Declarations always come as lists (because they can be + # several in one line), so we wrap the function definition + # into a list as well, to make the return value of + # external_declaration homogeneous. 
+ def p_external_declaration_1(self, p): + """ external_declaration : function_definition + """ + p[0] = [p[1]] + + def p_external_declaration_2(self, p): + """ external_declaration : declaration + """ + p[0] = p[1] + + def p_external_declaration_3(self, p): + """ external_declaration : pp_directive + | pppragma_directive + """ + p[0] = [p[1]] + + def p_external_declaration_4(self, p): + """ external_declaration : SEMI + """ + p[0] = [] + + def p_external_declaration_5(self, p): + """ external_declaration : static_assert + """ + p[0] = p[1] + + def p_static_assert_declaration(self, p): + """ static_assert : _STATIC_ASSERT LPAREN constant_expression COMMA unified_string_literal RPAREN + | _STATIC_ASSERT LPAREN constant_expression RPAREN + """ + if len(p) == 5: + p[0] = [c_ast.StaticAssert(p[3], None, self._token_coord(p, 1))] + else: + p[0] = [c_ast.StaticAssert(p[3], p[5], self._token_coord(p, 1))] + + def p_pp_directive(self, p): + """ pp_directive : PPHASH + """ + self._parse_error('Directives not supported yet', + self._token_coord(p, 1)) + + # This encompasses two types of C99-compatible pragmas: + # - The #pragma directive: + # # pragma character_sequence + # - The _Pragma unary operator: + # _Pragma ( " string_literal " ) + def p_pppragma_directive(self, p): + """ pppragma_directive : PPPRAGMA + | PPPRAGMA PPPRAGMASTR + | _PRAGMA LPAREN unified_string_literal RPAREN + """ + if len(p) == 5: + p[0] = c_ast.Pragma(p[3], self._token_coord(p, 2)) + elif len(p) == 3: + p[0] = c_ast.Pragma(p[2], self._token_coord(p, 2)) + else: + p[0] = c_ast.Pragma("", self._token_coord(p, 1)) + + def p_pppragma_directive_list(self, p): + """ pppragma_directive_list : pppragma_directive + | pppragma_directive_list pppragma_directive + """ + p[0] = [p[1]] if len(p) == 2 else p[1] + [p[2]] + + # In function definitions, the declarator can be followed by + # a declaration list, for old "K&R style" function definitios. + def p_function_definition_1(self, p): + """ function_definition : id_declarator declaration_list_opt compound_statement + """ + # no declaration specifiers - 'int' becomes the default type + spec = dict( + qual=[], + alignment=[], + storage=[], + type=[c_ast.IdentifierType(['int'], + coord=self._token_coord(p, 1))], + function=[]) + + p[0] = self._build_function_definition( + spec=spec, + decl=p[1], + param_decls=p[2], + body=p[3]) + + def p_function_definition_2(self, p): + """ function_definition : declaration_specifiers id_declarator declaration_list_opt compound_statement + """ + spec = p[1] + + p[0] = self._build_function_definition( + spec=spec, + decl=p[2], + param_decls=p[3], + body=p[4]) + + # Note, according to C18 A.2.2 6.7.10 static_assert-declaration _Static_assert + # is a declaration, not a statement. We additionally recognise it as a statement + # to fix parsing of _Static_assert inside the functions. + # + def p_statement(self, p): + """ statement : labeled_statement + | expression_statement + | compound_statement + | selection_statement + | iteration_statement + | jump_statement + | pppragma_directive + | static_assert + """ + p[0] = p[1] + + # A pragma is generally considered a decorator rather than an actual + # statement. Still, for the purposes of analyzing an abstract syntax tree of + # C code, pragma's should not be ignored and were previously treated as a + # statement. This presents a problem for constructs that take a statement + # such as labeled_statements, selection_statements, and + # iteration_statements, causing a misleading structure in the AST. 
For + # example, consider the following C code. + # + # for (int i = 0; i < 3; i++) + # #pragma omp critical + # sum += 1; + # + # This code will compile and execute "sum += 1;" as the body of the for + # loop. Previous implementations of PyCParser would render the AST for this + # block of code as follows: + # + # For: + # DeclList: + # Decl: i, [], [], [] + # TypeDecl: i, [] + # IdentifierType: ['int'] + # Constant: int, 0 + # BinaryOp: < + # ID: i + # Constant: int, 3 + # UnaryOp: p++ + # ID: i + # Pragma: omp critical + # Assignment: += + # ID: sum + # Constant: int, 1 + # + # This AST misleadingly takes the Pragma as the body of the loop and the + # assignment then becomes a sibling of the loop. + # + # To solve edge cases like these, the pragmacomp_or_statement rule groups + # a pragma and its following statement (which would otherwise be orphaned) + # using a compound block, effectively turning the above code into: + # + # for (int i = 0; i < 3; i++) { + # #pragma omp critical + # sum += 1; + # } + def p_pragmacomp_or_statement(self, p): + """ pragmacomp_or_statement : pppragma_directive_list statement + | statement + """ + if len(p) == 3: + p[0] = c_ast.Compound( + block_items=p[1]+[p[2]], + coord=self._token_coord(p, 1)) + else: + p[0] = p[1] + + # In C, declarations can come several in a line: + # int x, *px, romulo = 5; + # + # However, for the AST, we will split them to separate Decl + # nodes. + # + # This rule splits its declarations and always returns a list + # of Decl nodes, even if it's one element long. + # + def p_decl_body(self, p): + """ decl_body : declaration_specifiers init_declarator_list_opt + | declaration_specifiers_no_type id_init_declarator_list_opt + """ + spec = p[1] + + # p[2] (init_declarator_list_opt) is either a list or None + # + if p[2] is None: + # By the standard, you must have at least one declarator unless + # declaring a structure tag, a union tag, or the members of an + # enumeration. + # + ty = spec['type'] + s_u_or_e = (c_ast.Struct, c_ast.Union, c_ast.Enum) + if len(ty) == 1 and isinstance(ty[0], s_u_or_e): + decls = [c_ast.Decl( + name=None, + quals=spec['qual'], + align=spec['alignment'], + storage=spec['storage'], + funcspec=spec['function'], + type=ty[0], + init=None, + bitsize=None, + coord=ty[0].coord)] + + # However, this case can also occur on redeclared identifiers in + # an inner scope. The trouble is that the redeclared type's name + # gets grouped into declaration_specifiers; _build_declarations + # compensates for this. + # + else: + decls = self._build_declarations( + spec=spec, + decls=[dict(decl=None, init=None)], + typedef_namespace=True) + + else: + decls = self._build_declarations( + spec=spec, + decls=p[2], + typedef_namespace=True) + + p[0] = decls + + # The declaration has been split to a decl_body sub-rule and + # SEMI, because having them in a single rule created a problem + # for defining typedefs. + # + # If a typedef line was directly followed by a line using the + # type defined with the typedef, the type would not be + # recognized. This is because to reduce the declaration rule, + # the parser's lookahead asked for the token after SEMI, which + # was the type from the next line, and the lexer had no chance + # to see the updated type symbol table. + # + # Splitting solves this problem, because after seeing SEMI, + # the parser reduces decl_body, which actually adds the new + # type into the table to be seen by the lexer before the next + # line is reached. 
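+ # For example: + # + # typedef int TT; + # TT x; + # + # Reducing decl_body for the first line registers TT before the lexer + # tokenizes the second line, so TT is then correctly seen as a TYPEID + # rather than an ID.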
+ def p_declaration(self, p): + """ declaration : decl_body SEMI + """ + p[0] = p[1] + + # Since each declaration is a list of declarations, this + # rule will combine all the declarations and return a single + # list + # + def p_declaration_list(self, p): + """ declaration_list : declaration + | declaration_list declaration + """ + p[0] = p[1] if len(p) == 2 else p[1] + p[2] + + # To know when declaration-specifiers end and declarators begin, + # we require declaration-specifiers to have at least one + # type-specifier, and disallow typedef-names after we've seen any + # type-specifier. These are both required by the spec. + # + def p_declaration_specifiers_no_type_1(self, p): + """ declaration_specifiers_no_type : type_qualifier declaration_specifiers_no_type_opt + """ + p[0] = self._add_declaration_specifier(p[2], p[1], 'qual') + + def p_declaration_specifiers_no_type_2(self, p): + """ declaration_specifiers_no_type : storage_class_specifier declaration_specifiers_no_type_opt + """ + p[0] = self._add_declaration_specifier(p[2], p[1], 'storage') + + def p_declaration_specifiers_no_type_3(self, p): + """ declaration_specifiers_no_type : function_specifier declaration_specifiers_no_type_opt + """ + p[0] = self._add_declaration_specifier(p[2], p[1], 'function') + + # Without this, `typedef _Atomic(T) U` will parse incorrectly because the + # _Atomic qualifier will match, instead of the specifier. + def p_declaration_specifiers_no_type_4(self, p): + """ declaration_specifiers_no_type : atomic_specifier declaration_specifiers_no_type_opt + """ + p[0] = self._add_declaration_specifier(p[2], p[1], 'type') + + def p_declaration_specifiers_no_type_5(self, p): + """ declaration_specifiers_no_type : alignment_specifier declaration_specifiers_no_type_opt + """ + p[0] = self._add_declaration_specifier(p[2], p[1], 'alignment') + + def p_declaration_specifiers_1(self, p): + """ declaration_specifiers : declaration_specifiers type_qualifier + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'qual', append=True) + + def p_declaration_specifiers_2(self, p): + """ declaration_specifiers : declaration_specifiers storage_class_specifier + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'storage', append=True) + + def p_declaration_specifiers_3(self, p): + """ declaration_specifiers : declaration_specifiers function_specifier + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'function', append=True) + + def p_declaration_specifiers_4(self, p): + """ declaration_specifiers : declaration_specifiers type_specifier_no_typeid + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True) + + def p_declaration_specifiers_5(self, p): + """ declaration_specifiers : type_specifier + """ + p[0] = self._add_declaration_specifier(None, p[1], 'type') + + def p_declaration_specifiers_6(self, p): + """ declaration_specifiers : declaration_specifiers_no_type type_specifier + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True) + + def p_declaration_specifiers_7(self, p): + """ declaration_specifiers : declaration_specifiers alignment_specifier + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'alignment', append=True) + + def p_storage_class_specifier(self, p): + """ storage_class_specifier : AUTO + | REGISTER + | STATIC + | EXTERN + | TYPEDEF + | _THREAD_LOCAL + """ + p[0] = p[1] + + def p_function_specifier(self, p): + """ function_specifier : INLINE + | _NORETURN + """ + p[0] = p[1] + + def p_type_specifier_no_typeid(self, p): + """ 
type_specifier_no_typeid : VOID + | _BOOL + | CHAR + | SHORT + | INT + | LONG + | FLOAT + | DOUBLE + | _COMPLEX + | SIGNED + | UNSIGNED + | __INT128 + """ + p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1)) + + def p_type_specifier(self, p): + """ type_specifier : typedef_name + | enum_specifier + | struct_or_union_specifier + | type_specifier_no_typeid + | atomic_specifier + """ + p[0] = p[1] + + # See section 6.7.2.4 of the C11 standard. + def p_atomic_specifier(self, p): + """ atomic_specifier : _ATOMIC LPAREN type_name RPAREN + """ + typ = p[3] + typ.quals.append('_Atomic') + p[0] = typ + + def p_type_qualifier(self, p): + """ type_qualifier : CONST + | RESTRICT + | VOLATILE + | _ATOMIC + """ + p[0] = p[1] + + def p_init_declarator_list(self, p): + """ init_declarator_list : init_declarator + | init_declarator_list COMMA init_declarator + """ + p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]] + + # Returns a {decl= : init=} dictionary + # If there's no initializer, uses None + # + def p_init_declarator(self, p): + """ init_declarator : declarator + | declarator EQUALS initializer + """ + p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None)) + + def p_id_init_declarator_list(self, p): + """ id_init_declarator_list : id_init_declarator + | id_init_declarator_list COMMA init_declarator + """ + p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]] + + def p_id_init_declarator(self, p): + """ id_init_declarator : id_declarator + | id_declarator EQUALS initializer + """ + p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None)) + + # Require at least one type specifier in a specifier-qualifier-list + # + def p_specifier_qualifier_list_1(self, p): + """ specifier_qualifier_list : specifier_qualifier_list type_specifier_no_typeid + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True) + + def p_specifier_qualifier_list_2(self, p): + """ specifier_qualifier_list : specifier_qualifier_list type_qualifier + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'qual', append=True) + + def p_specifier_qualifier_list_3(self, p): + """ specifier_qualifier_list : type_specifier + """ + p[0] = self._add_declaration_specifier(None, p[1], 'type') + + def p_specifier_qualifier_list_4(self, p): + """ specifier_qualifier_list : type_qualifier_list type_specifier + """ + p[0] = dict(qual=p[1], alignment=[], storage=[], type=[p[2]], function=[]) + + def p_specifier_qualifier_list_5(self, p): + """ specifier_qualifier_list : alignment_specifier + """ + p[0] = dict(qual=[], alignment=[p[1]], storage=[], type=[], function=[]) + + def p_specifier_qualifier_list_6(self, p): + """ specifier_qualifier_list : specifier_qualifier_list alignment_specifier + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'alignment') + + # TYPEID is allowed here (and in other struct/enum related tag names), because + # struct/enum tags reside in their own namespace and can be named the same as types + # + def p_struct_or_union_specifier_1(self, p): + """ struct_or_union_specifier : struct_or_union ID + | struct_or_union TYPEID + """ + klass = self._select_struct_union_class(p[1]) + # None means no list of members + p[0] = klass( + name=p[2], + decls=None, + coord=self._token_coord(p, 2)) + + def p_struct_or_union_specifier_2(self, p): + """ struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close + | struct_or_union brace_open brace_close + """ + klass = self._select_struct_union_class(p[1]) + if len(p) == 4: + # Empty sequence means an empty list 
of members + p[0] = klass( + name=None, + decls=[], + coord=self._token_coord(p, 2)) + else: + p[0] = klass( + name=None, + decls=p[3], + coord=self._token_coord(p, 2)) + + + def p_struct_or_union_specifier_3(self, p): + """ struct_or_union_specifier : struct_or_union ID brace_open struct_declaration_list brace_close + | struct_or_union ID brace_open brace_close + | struct_or_union TYPEID brace_open struct_declaration_list brace_close + | struct_or_union TYPEID brace_open brace_close + """ + klass = self._select_struct_union_class(p[1]) + if len(p) == 5: + # Empty sequence means an empty list of members + p[0] = klass( + name=p[2], + decls=[], + coord=self._token_coord(p, 2)) + else: + p[0] = klass( + name=p[2], + decls=p[4], + coord=self._token_coord(p, 2)) + + def p_struct_or_union(self, p): + """ struct_or_union : STRUCT + | UNION + """ + p[0] = p[1] + + # Combine all declarations into a single list + # + def p_struct_declaration_list(self, p): + """ struct_declaration_list : struct_declaration + | struct_declaration_list struct_declaration + """ + if len(p) == 2: + p[0] = p[1] or [] + else: + p[0] = p[1] + (p[2] or []) + + def p_struct_declaration_1(self, p): + """ struct_declaration : specifier_qualifier_list struct_declarator_list_opt SEMI + """ + spec = p[1] + assert 'typedef' not in spec['storage'] + + if p[2] is not None: + decls = self._build_declarations( + spec=spec, + decls=p[2]) + + elif len(spec['type']) == 1: + # Anonymous struct/union, gcc extension, C1x feature. + # Although the standard only allows structs/unions here, I see no + # reason to disallow other types since some compilers have typedefs + # here, and pycparser isn't about rejecting all invalid code. + # + node = spec['type'][0] + if isinstance(node, c_ast.Node): + decl_type = node + else: + decl_type = c_ast.IdentifierType(node) + + decls = self._build_declarations( + spec=spec, + decls=[dict(decl=decl_type)]) + + else: + # Structure/union members can have the same names as typedefs. + # The trouble is that the member's name gets grouped into + # specifier_qualifier_list; _build_declarations compensates. 
+ # + decls = self._build_declarations( + spec=spec, + decls=[dict(decl=None, init=None)]) + + p[0] = decls + + def p_struct_declaration_2(self, p): + """ struct_declaration : SEMI + """ + p[0] = None + + def p_struct_declaration_3(self, p): + """ struct_declaration : pppragma_directive + """ + p[0] = [p[1]] + + def p_struct_declarator_list(self, p): + """ struct_declarator_list : struct_declarator + | struct_declarator_list COMMA struct_declarator + """ + p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]] + + # struct_declarator passes up a dict with the keys: decl (for + # the underlying declarator) and bitsize (for the bitsize) + # + def p_struct_declarator_1(self, p): + """ struct_declarator : declarator + """ + p[0] = {'decl': p[1], 'bitsize': None} + + def p_struct_declarator_2(self, p): + """ struct_declarator : declarator COLON constant_expression + | COLON constant_expression + """ + if len(p) > 3: + p[0] = {'decl': p[1], 'bitsize': p[3]} + else: + p[0] = {'decl': c_ast.TypeDecl(None, None, None, None), 'bitsize': p[2]} + + def p_enum_specifier_1(self, p): + """ enum_specifier : ENUM ID + | ENUM TYPEID + """ + p[0] = c_ast.Enum(p[2], None, self._token_coord(p, 1)) + + def p_enum_specifier_2(self, p): + """ enum_specifier : ENUM brace_open enumerator_list brace_close + """ + p[0] = c_ast.Enum(None, p[3], self._token_coord(p, 1)) + + def p_enum_specifier_3(self, p): + """ enum_specifier : ENUM ID brace_open enumerator_list brace_close + | ENUM TYPEID brace_open enumerator_list brace_close + """ + p[0] = c_ast.Enum(p[2], p[4], self._token_coord(p, 1)) + + def p_enumerator_list(self, p): + """ enumerator_list : enumerator + | enumerator_list COMMA + | enumerator_list COMMA enumerator + """ + if len(p) == 2: + p[0] = c_ast.EnumeratorList([p[1]], p[1].coord) + elif len(p) == 3: + p[0] = p[1] + else: + p[1].enumerators.append(p[3]) + p[0] = p[1] + + def p_alignment_specifier(self, p): + """ alignment_specifier : _ALIGNAS LPAREN type_name RPAREN + | _ALIGNAS LPAREN constant_expression RPAREN + """ + p[0] = c_ast.Alignas(p[3], self._token_coord(p, 1)) + + def p_enumerator(self, p): + """ enumerator : ID + | ID EQUALS constant_expression + """ + if len(p) == 2: + enumerator = c_ast.Enumerator( + p[1], None, + self._token_coord(p, 1)) + else: + enumerator = c_ast.Enumerator( + p[1], p[3], + self._token_coord(p, 1)) + self._add_identifier(enumerator.name, enumerator.coord) + + p[0] = enumerator + + def p_declarator(self, p): + """ declarator : id_declarator + | typeid_declarator + """ + p[0] = p[1] + + @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID')) + def p_xxx_declarator_1(self, p): + """ xxx_declarator : direct_xxx_declarator + """ + p[0] = p[1] + + @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID')) + def p_xxx_declarator_2(self, p): + """ xxx_declarator : pointer direct_xxx_declarator + """ + p[0] = self._type_modify_decl(p[2], p[1]) + + @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID')) + def p_direct_xxx_declarator_1(self, p): + """ direct_xxx_declarator : yyy + """ + p[0] = c_ast.TypeDecl( + declname=p[1], + type=None, + quals=None, + align=None, + coord=self._token_coord(p, 1)) + + @parameterized(('id', 'ID'), ('typeid', 'TYPEID')) + def p_direct_xxx_declarator_2(self, p): + """ direct_xxx_declarator : LPAREN xxx_declarator RPAREN + """ + p[0] = p[2] + + @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID')) + def p_direct_xxx_declarator_3(self, p): + """ 
direct_xxx_declarator : direct_xxx_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET + """ + quals = (p[3] if len(p) > 5 else []) or [] + # Accept dimension qualifiers + # Per C99 6.7.5.3 p7 + arr = c_ast.ArrayDecl( + type=None, + dim=p[4] if len(p) > 5 else p[3], + dim_quals=quals, + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=arr) + + @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID')) + def p_direct_xxx_declarator_4(self, p): + """ direct_xxx_declarator : direct_xxx_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET + | direct_xxx_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET + """ + # Using slice notation for PLY objects doesn't work in Python 3 for the + # version of PLY embedded with pycparser; see PLY Google Code issue 30. + # Work around that here by listing the two elements separately. + listed_quals = [item if isinstance(item, list) else [item] + for item in [p[3],p[4]]] + dim_quals = [qual for sublist in listed_quals for qual in sublist + if qual is not None] + arr = c_ast.ArrayDecl( + type=None, + dim=p[5], + dim_quals=dim_quals, + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=arr) + + # Special for VLAs + # + @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID')) + def p_direct_xxx_declarator_5(self, p): + """ direct_xxx_declarator : direct_xxx_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET + """ + arr = c_ast.ArrayDecl( + type=None, + dim=c_ast.ID(p[4], self._token_coord(p, 4)), + dim_quals=p[3] if p[3] is not None else [], + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=arr) + + @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID')) + def p_direct_xxx_declarator_6(self, p): + """ direct_xxx_declarator : direct_xxx_declarator LPAREN parameter_type_list RPAREN + | direct_xxx_declarator LPAREN identifier_list_opt RPAREN + """ + func = c_ast.FuncDecl( + args=p[3], + type=None, + coord=p[1].coord) + + # To see why _get_yacc_lookahead_token is needed, consider: + # typedef char TT; + # void foo(int TT) { TT = 10; } + # Outside the function, TT is a typedef, but inside (starting and + # ending with the braces) it's a parameter. The trouble begins with + # yacc's lookahead token. We don't know if we're declaring or + # defining a function until we see LBRACE, but if we wait for yacc to + # trigger a rule on that token, then TT will have already been read + # and incorrectly interpreted as TYPEID. We need to add the + # parameters to the scope the moment the lexer sees LBRACE. + # + if self._get_yacc_lookahead_token().type == "LBRACE": + if func.args is not None: + for param in func.args.params: + if isinstance(param, c_ast.EllipsisParam): break + self._add_identifier(param.name, param.coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=func) + + def p_pointer(self, p): + """ pointer : TIMES type_qualifier_list_opt + | TIMES type_qualifier_list_opt pointer + """ + coord = self._token_coord(p, 1) + # Pointer decls nest from inside out. This is important when different + # levels have different qualifiers. For example: + # + # char * const * p; + # + # Means "pointer to const pointer to char" + # + # While: + # + # char ** const p; + # + # Means "const pointer to pointer to char" + # + # So when we construct PtrDecl nestings, the leftmost pointer goes in + # as the most nested type. 
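+ # For 'char * const * p;' this yields + # PtrDecl([]) -> PtrDecl(['const']) -> TypeDecl('p'), + # with the qualified (leftmost) pointer as the innermost node.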
+ nested_type = c_ast.PtrDecl(quals=p[2] or [], type=None, coord=coord) + if len(p) > 3: + tail_type = p[3] + while tail_type.type is not None: + tail_type = tail_type.type + tail_type.type = nested_type + p[0] = p[3] + else: + p[0] = nested_type + + def p_type_qualifier_list(self, p): + """ type_qualifier_list : type_qualifier + | type_qualifier_list type_qualifier + """ + p[0] = [p[1]] if len(p) == 2 else p[1] + [p[2]] + + def p_parameter_type_list(self, p): + """ parameter_type_list : parameter_list + | parameter_list COMMA ELLIPSIS + """ + if len(p) > 2: + p[1].params.append(c_ast.EllipsisParam(self._token_coord(p, 3))) + + p[0] = p[1] + + def p_parameter_list(self, p): + """ parameter_list : parameter_declaration + | parameter_list COMMA parameter_declaration + """ + if len(p) == 2: # single parameter + p[0] = c_ast.ParamList([p[1]], p[1].coord) + else: + p[1].params.append(p[3]) + p[0] = p[1] + + # From ISO/IEC 9899:TC2, 6.7.5.3.11: + # "If, in a parameter declaration, an identifier can be treated either + # as a typedef name or as a parameter name, it shall be taken as a + # typedef name." + # + # Inside a parameter declaration, once we've reduced declaration specifiers, + # if we shift in an LPAREN and see a TYPEID, it could be either an abstract + # declarator or a declarator nested inside parens. This rule tells us to + # always treat it as an abstract declarator. Therefore, we only accept + # `id_declarator`s and `typeid_noparen_declarator`s. + def p_parameter_declaration_1(self, p): + """ parameter_declaration : declaration_specifiers id_declarator + | declaration_specifiers typeid_noparen_declarator + """ + spec = p[1] + if not spec['type']: + spec['type'] = [c_ast.IdentifierType(['int'], + coord=self._token_coord(p, 1))] + p[0] = self._build_declarations( + spec=spec, + decls=[dict(decl=p[2])])[0] + + def p_parameter_declaration_2(self, p): + """ parameter_declaration : declaration_specifiers abstract_declarator_opt + """ + spec = p[1] + if not spec['type']: + spec['type'] = [c_ast.IdentifierType(['int'], + coord=self._token_coord(p, 1))] + + # Parameters can have the same names as typedefs. The trouble is that + # the parameter's name gets grouped into declaration_specifiers, making + # it look like an old-style declaration; compensate. 
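+        # (Illustrative case, added for clarity:
+        #      typedef char TT;
+        #      void foo(int TT);
+        #  here `int TT` must declare a parameter named TT even though TT is
+        #  a typedef name in the enclosing scope; the check below detects
+        #  this and rebuilds a proper named declaration.)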
+ # + if len(spec['type']) > 1 and len(spec['type'][-1].names) == 1 and \ + self._is_type_in_scope(spec['type'][-1].names[0]): + decl = self._build_declarations( + spec=spec, + decls=[dict(decl=p[2], init=None)])[0] + + # This truly is an old-style parameter declaration + # + else: + decl = c_ast.Typename( + name='', + quals=spec['qual'], + align=None, + type=p[2] or c_ast.TypeDecl(None, None, None, None), + coord=self._token_coord(p, 2)) + typename = spec['type'] + decl = self._fix_decl_name_type(decl, typename) + + p[0] = decl + + def p_identifier_list(self, p): + """ identifier_list : identifier + | identifier_list COMMA identifier + """ + if len(p) == 2: # single parameter + p[0] = c_ast.ParamList([p[1]], p[1].coord) + else: + p[1].params.append(p[3]) + p[0] = p[1] + + def p_initializer_1(self, p): + """ initializer : assignment_expression + """ + p[0] = p[1] + + def p_initializer_2(self, p): + """ initializer : brace_open initializer_list_opt brace_close + | brace_open initializer_list COMMA brace_close + """ + if p[2] is None: + p[0] = c_ast.InitList([], self._token_coord(p, 1)) + else: + p[0] = p[2] + + def p_initializer_list(self, p): + """ initializer_list : designation_opt initializer + | initializer_list COMMA designation_opt initializer + """ + if len(p) == 3: # single initializer + init = p[2] if p[1] is None else c_ast.NamedInitializer(p[1], p[2]) + p[0] = c_ast.InitList([init], p[2].coord) + else: + init = p[4] if p[3] is None else c_ast.NamedInitializer(p[3], p[4]) + p[1].exprs.append(init) + p[0] = p[1] + + def p_designation(self, p): + """ designation : designator_list EQUALS + """ + p[0] = p[1] + + # Designators are represented as a list of nodes, in the order in which + # they're written in the code. + # + def p_designator_list(self, p): + """ designator_list : designator + | designator_list designator + """ + p[0] = [p[1]] if len(p) == 2 else p[1] + [p[2]] + + def p_designator(self, p): + """ designator : LBRACKET constant_expression RBRACKET + | PERIOD identifier + """ + p[0] = p[2] + + def p_type_name(self, p): + """ type_name : specifier_qualifier_list abstract_declarator_opt + """ + typename = c_ast.Typename( + name='', + quals=p[1]['qual'][:], + align=None, + type=p[2] or c_ast.TypeDecl(None, None, None, None), + coord=self._token_coord(p, 2)) + + p[0] = self._fix_decl_name_type(typename, p[1]['type']) + + def p_abstract_declarator_1(self, p): + """ abstract_declarator : pointer + """ + dummytype = c_ast.TypeDecl(None, None, None, None) + p[0] = self._type_modify_decl( + decl=dummytype, + modifier=p[1]) + + def p_abstract_declarator_2(self, p): + """ abstract_declarator : pointer direct_abstract_declarator + """ + p[0] = self._type_modify_decl(p[2], p[1]) + + def p_abstract_declarator_3(self, p): + """ abstract_declarator : direct_abstract_declarator + """ + p[0] = p[1] + + # Creating and using direct_abstract_declarator_opt here + # instead of listing both direct_abstract_declarator and the + # lack of it in the beginning of _1 and _2 caused two + # shift/reduce errors. 
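+    # (Illustrative note: an abstract declarator is a declarator without a
+    #  name, e.g. the `*` and `[10]` in `void f(int *, char [10]);`. The
+    #  rules below build the same PtrDecl/ArrayDecl/FuncDecl shapes as the
+    #  named variants, just around an anonymous TypeDecl.)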
+ # + def p_direct_abstract_declarator_1(self, p): + """ direct_abstract_declarator : LPAREN abstract_declarator RPAREN """ + p[0] = p[2] + + def p_direct_abstract_declarator_2(self, p): + """ direct_abstract_declarator : direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET + """ + arr = c_ast.ArrayDecl( + type=None, + dim=p[3], + dim_quals=[], + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=arr) + + def p_direct_abstract_declarator_3(self, p): + """ direct_abstract_declarator : LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET + """ + quals = (p[2] if len(p) > 4 else []) or [] + p[0] = c_ast.ArrayDecl( + type=c_ast.TypeDecl(None, None, None, None), + dim=p[3] if len(p) > 4 else p[2], + dim_quals=quals, + coord=self._token_coord(p, 1)) + + def p_direct_abstract_declarator_4(self, p): + """ direct_abstract_declarator : direct_abstract_declarator LBRACKET TIMES RBRACKET + """ + arr = c_ast.ArrayDecl( + type=None, + dim=c_ast.ID(p[3], self._token_coord(p, 3)), + dim_quals=[], + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=arr) + + def p_direct_abstract_declarator_5(self, p): + """ direct_abstract_declarator : LBRACKET TIMES RBRACKET + """ + p[0] = c_ast.ArrayDecl( + type=c_ast.TypeDecl(None, None, None, None), + dim=c_ast.ID(p[3], self._token_coord(p, 3)), + dim_quals=[], + coord=self._token_coord(p, 1)) + + def p_direct_abstract_declarator_6(self, p): + """ direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN + """ + func = c_ast.FuncDecl( + args=p[3], + type=None, + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=func) + + def p_direct_abstract_declarator_7(self, p): + """ direct_abstract_declarator : LPAREN parameter_type_list_opt RPAREN + """ + p[0] = c_ast.FuncDecl( + args=p[2], + type=c_ast.TypeDecl(None, None, None, None), + coord=self._token_coord(p, 1)) + + def p_direct_abstract_declarator_8(self, p): + """ direct_abstract_declarator : LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET + | LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET + """ + listed_quals = [item if isinstance(item, list) else [item] + for item in [p[2],p[3]]] + quals = [qual for sublist in listed_quals for qual in sublist + if qual is not None] + p[0] = c_ast.ArrayDecl( + type=c_ast.TypeDecl(None, None, None, None), + dim=p[4], + dim_quals=quals, + coord=self._token_coord(p, 1)) + + # declaration is a list, statement isn't. 
To make it consistent, block_item + # will always be a list + # + def p_block_item(self, p): + """ block_item : declaration + | statement + """ + p[0] = p[1] if isinstance(p[1], list) else [p[1]] + + # Since we made block_item a list, this just combines lists + # + def p_block_item_list(self, p): + """ block_item_list : block_item + | block_item_list block_item + """ + # Empty block items (plain ';') produce [None], so ignore them + p[0] = p[1] if (len(p) == 2 or p[2] == [None]) else p[1] + p[2] + + def p_compound_statement_1(self, p): + """ compound_statement : brace_open block_item_list_opt brace_close """ + p[0] = c_ast.Compound( + block_items=p[2], + coord=self._token_coord(p, 1)) + + def p_labeled_statement_1(self, p): + """ labeled_statement : ID COLON pragmacomp_or_statement """ + p[0] = c_ast.Label(p[1], p[3], self._token_coord(p, 1)) + + def p_labeled_statement_2(self, p): + """ labeled_statement : CASE constant_expression COLON pragmacomp_or_statement """ + p[0] = c_ast.Case(p[2], [p[4]], self._token_coord(p, 1)) + + def p_labeled_statement_3(self, p): + """ labeled_statement : DEFAULT COLON pragmacomp_or_statement """ + p[0] = c_ast.Default([p[3]], self._token_coord(p, 1)) + + def p_labeled_statement_4(self, p): + """ labeled_statement : ID COLON """ + p[0] = c_ast.Label(p[1], c_ast.EmptyStatement(self._token_coord(p, 1)), self._token_coord(p, 1)) + + def p_labeled_statement_5(self, p): + """ labeled_statement : CASE constant_expression COLON """ + p[0] = c_ast.Case(p[2], [c_ast.EmptyStatement(self._token_coord(p, 2))], self._token_coord(p, 1)) + + def p_labeled_statement_6(self, p): + """ labeled_statement : DEFAULT COLON """ + p[0] = c_ast.Default([c_ast.EmptyStatement(self._token_coord(p, 1))], self._token_coord(p, 1)) + + def p_selection_statement_1(self, p): + """ selection_statement : IF LPAREN expression RPAREN pragmacomp_or_statement """ + p[0] = c_ast.If(p[3], p[5], None, self._token_coord(p, 1)) + + def p_selection_statement_2(self, p): + """ selection_statement : IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement """ + p[0] = c_ast.If(p[3], p[5], p[7], self._token_coord(p, 1)) + + def p_selection_statement_3(self, p): + """ selection_statement : SWITCH LPAREN expression RPAREN pragmacomp_or_statement """ + p[0] = fix_switch_cases( + c_ast.Switch(p[3], p[5], self._token_coord(p, 1))) + + def p_iteration_statement_1(self, p): + """ iteration_statement : WHILE LPAREN expression RPAREN pragmacomp_or_statement """ + p[0] = c_ast.While(p[3], p[5], self._token_coord(p, 1)) + + def p_iteration_statement_2(self, p): + """ iteration_statement : DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI """ + p[0] = c_ast.DoWhile(p[5], p[2], self._token_coord(p, 1)) + + def p_iteration_statement_3(self, p): + """ iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement """ + p[0] = c_ast.For(p[3], p[5], p[7], p[9], self._token_coord(p, 1)) + + def p_iteration_statement_4(self, p): + """ iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement """ + p[0] = c_ast.For(c_ast.DeclList(p[3], self._token_coord(p, 1)), + p[4], p[6], p[8], self._token_coord(p, 1)) + + def p_jump_statement_1(self, p): + """ jump_statement : GOTO ID SEMI """ + p[0] = c_ast.Goto(p[2], self._token_coord(p, 1)) + + def p_jump_statement_2(self, p): + """ jump_statement : BREAK SEMI """ + p[0] = c_ast.Break(self._token_coord(p, 1)) + + def p_jump_statement_3(self, p): + """ 
jump_statement : CONTINUE SEMI """ + p[0] = c_ast.Continue(self._token_coord(p, 1)) + + def p_jump_statement_4(self, p): + """ jump_statement : RETURN expression SEMI + | RETURN SEMI + """ + p[0] = c_ast.Return(p[2] if len(p) == 4 else None, self._token_coord(p, 1)) + + def p_expression_statement(self, p): + """ expression_statement : expression_opt SEMI """ + if p[1] is None: + p[0] = c_ast.EmptyStatement(self._token_coord(p, 2)) + else: + p[0] = p[1] + + def p_expression(self, p): + """ expression : assignment_expression + | expression COMMA assignment_expression + """ + if len(p) == 2: + p[0] = p[1] + else: + if not isinstance(p[1], c_ast.ExprList): + p[1] = c_ast.ExprList([p[1]], p[1].coord) + + p[1].exprs.append(p[3]) + p[0] = p[1] + + def p_parenthesized_compound_expression(self, p): + """ assignment_expression : LPAREN compound_statement RPAREN """ + p[0] = p[2] + + def p_typedef_name(self, p): + """ typedef_name : TYPEID """ + p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1)) + + def p_assignment_expression(self, p): + """ assignment_expression : conditional_expression + | unary_expression assignment_operator assignment_expression + """ + if len(p) == 2: + p[0] = p[1] + else: + p[0] = c_ast.Assignment(p[2], p[1], p[3], p[1].coord) + + # K&R2 defines these as many separate rules, to encode + # precedence and associativity. Why work hard ? I'll just use + # the built in precedence/associativity specification feature + # of PLY. (see precedence declaration above) + # + def p_assignment_operator(self, p): + """ assignment_operator : EQUALS + | XOREQUAL + | TIMESEQUAL + | DIVEQUAL + | MODEQUAL + | PLUSEQUAL + | MINUSEQUAL + | LSHIFTEQUAL + | RSHIFTEQUAL + | ANDEQUAL + | OREQUAL + """ + p[0] = p[1] + + def p_constant_expression(self, p): + """ constant_expression : conditional_expression """ + p[0] = p[1] + + def p_conditional_expression(self, p): + """ conditional_expression : binary_expression + | binary_expression CONDOP expression COLON conditional_expression + """ + if len(p) == 2: + p[0] = p[1] + else: + p[0] = c_ast.TernaryOp(p[1], p[3], p[5], p[1].coord) + + def p_binary_expression(self, p): + """ binary_expression : cast_expression + | binary_expression TIMES binary_expression + | binary_expression DIVIDE binary_expression + | binary_expression MOD binary_expression + | binary_expression PLUS binary_expression + | binary_expression MINUS binary_expression + | binary_expression RSHIFT binary_expression + | binary_expression LSHIFT binary_expression + | binary_expression LT binary_expression + | binary_expression LE binary_expression + | binary_expression GE binary_expression + | binary_expression GT binary_expression + | binary_expression EQ binary_expression + | binary_expression NE binary_expression + | binary_expression AND binary_expression + | binary_expression OR binary_expression + | binary_expression XOR binary_expression + | binary_expression LAND binary_expression + | binary_expression LOR binary_expression + """ + if len(p) == 2: + p[0] = p[1] + else: + p[0] = c_ast.BinaryOp(p[2], p[1], p[3], p[1].coord) + + def p_cast_expression_1(self, p): + """ cast_expression : unary_expression """ + p[0] = p[1] + + def p_cast_expression_2(self, p): + """ cast_expression : LPAREN type_name RPAREN cast_expression """ + p[0] = c_ast.Cast(p[2], p[4], self._token_coord(p, 1)) + + def p_unary_expression_1(self, p): + """ unary_expression : postfix_expression """ + p[0] = p[1] + + def p_unary_expression_2(self, p): + """ unary_expression : PLUSPLUS unary_expression + | 
MINUSMINUS unary_expression + | unary_operator cast_expression + """ + p[0] = c_ast.UnaryOp(p[1], p[2], p[2].coord) + + def p_unary_expression_3(self, p): + """ unary_expression : SIZEOF unary_expression + | SIZEOF LPAREN type_name RPAREN + | _ALIGNOF LPAREN type_name RPAREN + """ + p[0] = c_ast.UnaryOp( + p[1], + p[2] if len(p) == 3 else p[3], + self._token_coord(p, 1)) + + def p_unary_operator(self, p): + """ unary_operator : AND + | TIMES + | PLUS + | MINUS + | NOT + | LNOT + """ + p[0] = p[1] + + def p_postfix_expression_1(self, p): + """ postfix_expression : primary_expression """ + p[0] = p[1] + + def p_postfix_expression_2(self, p): + """ postfix_expression : postfix_expression LBRACKET expression RBRACKET """ + p[0] = c_ast.ArrayRef(p[1], p[3], p[1].coord) + + def p_postfix_expression_3(self, p): + """ postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN + | postfix_expression LPAREN RPAREN + """ + p[0] = c_ast.FuncCall(p[1], p[3] if len(p) == 5 else None, p[1].coord) + + def p_postfix_expression_4(self, p): + """ postfix_expression : postfix_expression PERIOD ID + | postfix_expression PERIOD TYPEID + | postfix_expression ARROW ID + | postfix_expression ARROW TYPEID + """ + field = c_ast.ID(p[3], self._token_coord(p, 3)) + p[0] = c_ast.StructRef(p[1], p[2], field, p[1].coord) + + def p_postfix_expression_5(self, p): + """ postfix_expression : postfix_expression PLUSPLUS + | postfix_expression MINUSMINUS + """ + p[0] = c_ast.UnaryOp('p' + p[2], p[1], p[1].coord) + + def p_postfix_expression_6(self, p): + """ postfix_expression : LPAREN type_name RPAREN brace_open initializer_list brace_close + | LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close + """ + p[0] = c_ast.CompoundLiteral(p[2], p[5]) + + def p_primary_expression_1(self, p): + """ primary_expression : identifier """ + p[0] = p[1] + + def p_primary_expression_2(self, p): + """ primary_expression : constant """ + p[0] = p[1] + + def p_primary_expression_3(self, p): + """ primary_expression : unified_string_literal + | unified_wstring_literal + """ + p[0] = p[1] + + def p_primary_expression_4(self, p): + """ primary_expression : LPAREN expression RPAREN """ + p[0] = p[2] + + def p_primary_expression_5(self, p): + """ primary_expression : OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN + """ + coord = self._token_coord(p, 1) + p[0] = c_ast.FuncCall(c_ast.ID(p[1], coord), + c_ast.ExprList([p[3], p[5]], coord), + coord) + + def p_offsetof_member_designator(self, p): + """ offsetof_member_designator : identifier + | offsetof_member_designator PERIOD identifier + | offsetof_member_designator LBRACKET expression RBRACKET + """ + if len(p) == 2: + p[0] = p[1] + elif len(p) == 4: + p[0] = c_ast.StructRef(p[1], p[2], p[3], p[1].coord) + elif len(p) == 5: + p[0] = c_ast.ArrayRef(p[1], p[3], p[1].coord) + else: + raise NotImplementedError("Unexpected parsing state. 
len(p): %u" % len(p)) + + def p_argument_expression_list(self, p): + """ argument_expression_list : assignment_expression + | argument_expression_list COMMA assignment_expression + """ + if len(p) == 2: # single expr + p[0] = c_ast.ExprList([p[1]], p[1].coord) + else: + p[1].exprs.append(p[3]) + p[0] = p[1] + + def p_identifier(self, p): + """ identifier : ID """ + p[0] = c_ast.ID(p[1], self._token_coord(p, 1)) + + def p_constant_1(self, p): + """ constant : INT_CONST_DEC + | INT_CONST_OCT + | INT_CONST_HEX + | INT_CONST_BIN + | INT_CONST_CHAR + """ + uCount = 0 + lCount = 0 + for x in p[1][-3:]: + if x in ('l', 'L'): + lCount += 1 + elif x in ('u', 'U'): + uCount += 1 + t = '' + if uCount > 1: + raise ValueError('Constant cannot have more than one u/U suffix.') + elif lCount > 2: + raise ValueError('Constant cannot have more than two l/L suffix.') + prefix = 'unsigned ' * uCount + 'long ' * lCount + p[0] = c_ast.Constant( + prefix + 'int', p[1], self._token_coord(p, 1)) + + def p_constant_2(self, p): + """ constant : FLOAT_CONST + | HEX_FLOAT_CONST + """ + if p[1][-1] in ('f', 'F'): + t = 'float' + elif p[1][-1] in ('l', 'L'): + t = 'long double' + else: + t = 'double' + + p[0] = c_ast.Constant( + t, p[1], self._token_coord(p, 1)) + + def p_constant_3(self, p): + """ constant : CHAR_CONST + | WCHAR_CONST + | U8CHAR_CONST + | U16CHAR_CONST + | U32CHAR_CONST + """ + p[0] = c_ast.Constant( + 'char', p[1], self._token_coord(p, 1)) + + # The "unified" string and wstring literal rules are for supporting + # concatenation of adjacent string literals. + # I.e. "hello " "world" is seen by the C compiler as a single string literal + # with the value "hello world" + # + def p_unified_string_literal(self, p): + """ unified_string_literal : STRING_LITERAL + | unified_string_literal STRING_LITERAL + """ + if len(p) == 2: # single literal + p[0] = c_ast.Constant( + 'string', p[1], self._token_coord(p, 1)) + else: + p[1].value = p[1].value[:-1] + p[2][1:] + p[0] = p[1] + + def p_unified_wstring_literal(self, p): + """ unified_wstring_literal : WSTRING_LITERAL + | U8STRING_LITERAL + | U16STRING_LITERAL + | U32STRING_LITERAL + | unified_wstring_literal WSTRING_LITERAL + | unified_wstring_literal U8STRING_LITERAL + | unified_wstring_literal U16STRING_LITERAL + | unified_wstring_literal U32STRING_LITERAL + """ + if len(p) == 2: # single literal + p[0] = c_ast.Constant( + 'string', p[1], self._token_coord(p, 1)) + else: + p[1].value = p[1].value.rstrip()[:-1] + p[2][2:] + p[0] = p[1] + + def p_brace_open(self, p): + """ brace_open : LBRACE + """ + p[0] = p[1] + p.set_lineno(0, p.lineno(1)) + + def p_brace_close(self, p): + """ brace_close : RBRACE + """ + p[0] = p[1] + p.set_lineno(0, p.lineno(1)) + + def p_empty(self, p): + 'empty : ' + p[0] = None + + def p_error(self, p): + # If error recovery is added here in the future, make sure + # _get_yacc_lookahead_token still works! + # + if p: + self._parse_error( + 'before: %s' % p.value, + self._coord(lineno=p.lineno, + column=self.clex.find_tok_column(p))) + else: + self._parse_error('At end of input', self.clex.filename) diff --git a/venv/lib/python3.12/site-packages/pycparser/lextab.py b/venv/lib/python3.12/site-packages/pycparser/lextab.py new file mode 100644 index 00000000..bfcaa211 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pycparser/lextab.py @@ -0,0 +1,10 @@ +# lextab.py. This file automatically created by PLY (version 3.10). Don't edit! 
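+# (Background note, an editorial assumption: pycparser ships this
+#  pre-generated table so PLY can skip rebuilding the master regex at import
+#  time; lex.lex() picks it up through its `lextab`/`optimize` arguments.)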
+_tabversion = '3.10'
+_lextokens = set(('AND', 'ANDEQUAL', 'ARROW', 'AUTO', 'BREAK', 'CASE', 'CHAR', 'CHAR_CONST', 'COLON', 'COMMA', 'CONDOP', 'CONST', 'CONTINUE', 'DEFAULT', 'DIVEQUAL', 'DIVIDE', 'DO', 'DOUBLE', 'ELLIPSIS', 'ELSE', 'ENUM', 'EQ', 'EQUALS', 'EXTERN', 'FLOAT', 'FLOAT_CONST', 'FOR', 'GE', 'GOTO', 'GT', 'HEX_FLOAT_CONST', 'ID', 'IF', 'INLINE', 'INT', 'INT_CONST_BIN', 'INT_CONST_CHAR', 'INT_CONST_DEC', 'INT_CONST_HEX', 'INT_CONST_OCT', 'LAND', 'LBRACE', 'LBRACKET', 'LE', 'LNOT', 'LONG', 'LOR', 'LPAREN', 'LSHIFT', 'LSHIFTEQUAL', 'LT', 'MINUS', 'MINUSEQUAL', 'MINUSMINUS', 'MOD', 'MODEQUAL', 'NE', 'NOT', 'OFFSETOF', 'OR', 'OREQUAL', 'PERIOD', 'PLUS', 'PLUSEQUAL', 'PLUSPLUS', 'PPHASH', 'PPPRAGMA', 'PPPRAGMASTR', 'RBRACE', 'RBRACKET', 'REGISTER', 'RESTRICT', 'RETURN', 'RPAREN', 'RSHIFT', 'RSHIFTEQUAL', 'SEMI', 'SHORT', 'SIGNED', 'SIZEOF', 'STATIC', 'STRING_LITERAL', 'STRUCT', 'SWITCH', 'TIMES', 'TIMESEQUAL', 'TYPEDEF', 'TYPEID', 'U16CHAR_CONST', 'U16STRING_LITERAL', 'U32CHAR_CONST', 'U32STRING_LITERAL', 'U8CHAR_CONST', 'U8STRING_LITERAL', 'UNION', 'UNSIGNED', 'VOID', 'VOLATILE', 'WCHAR_CONST', 'WHILE', 'WSTRING_LITERAL', 'XOR', 'XOREQUAL', '_ALIGNAS', '_ALIGNOF', '_ATOMIC', '_BOOL', '_COMPLEX', '_NORETURN', '_PRAGMA', '_STATIC_ASSERT', '_THREAD_LOCAL', '__INT128'))
+_lexreflags = 64
+_lexliterals = ''
+_lexstateinfo = {'INITIAL': 'inclusive', 'ppline': 'exclusive', 'pppragma': 'exclusive'}
+_lexstatere = {'INITIAL': [('(?P<t_PPHASH>[ \\t]*\\#)|(?P<t_NEWLINE>\\n+)|(?P<t_LBRACE>\\{)|(?P<t_RBRACE>\\})|(?P<t_FLOAT_CONST>((((([0-9]*\\.[0-9]+)|([0-9]+\\.))([eE][-+]?[0-9]+)?)|([0-9]+([eE][-+]?[0-9]+)))[FfLl]?))|(?P<t_HEX_FLOAT_CONST>(0[xX]([0-9a-fA-F]+|((([0-9a-fA-F]+)?\\.[0-9a-fA-F]+)|([0-9a-fA-F]+\\.)))([pP][+-]?[0-9]+)[FfLl]?))|(?P<t_INT_CONST_HEX>0[xX][0-9a-fA-F]+(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|(?P<t_INT_CONST_BIN>0[bB][01]+(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|(?P<t_BAD_CONST_OCT>0[0-7]*[89])|(?P<t_UNSUPPORTED_C_STYLE_COMMENT>\\/\\*)|(?P<t_UNSUPPORTED_CXX_STYLE_COMMENT>\\/\\/)|(?P<t_INT_CONST_OCT>0[0-7]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|(?P<t_INT_CONST_DEC>(0(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|([1-9][0-9]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?))|(?P<t_INT_CONST_CHAR>\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F])))){2,4}\')|(?P<t_CHAR_CONST>\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))\')|(?P<t_WCHAR_CONST>L\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))\')|(?P<t_U8CHAR_CONST>u8\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))\')|(?P<t_U16CHAR_CONST>u\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))\')|(?P<t_U32CHAR_CONST>U\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))\')|(?P<t_UNMATCHED_QUOTE>(\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))*\\n)|(\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))*$))|(?P<t_BAD_CHAR_CONST>(\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))[^\'\n]+\')|(\'\')|(\'([\\\\][^a-zA-Z._~^!=&\\^\\-\\\\?\'"x0-9])[^\'\\n]*\'))|(?P<t_WSTRING_LITERAL>L"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P<t_U8STRING_LITERAL>u8"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P<t_U16STRING_LITERAL>u"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P<t_U32STRING_LITERAL>U"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P<t_BAD_STRING_LITERAL>"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*([\\\\][^a-zA-Z._~^!=&\\^\\-\\\\?\'"x0-9])([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P<t_ID>[a-zA-Z_$][0-9a-zA-Z_$]*)|(?P<t_STRING_LITERAL>"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P<t_ELLIPSIS>\\.\\.\\.)|(?P<t_LOR>\\|\\|)|(?P<t_PLUSPLUS>\\+\\+)|(?P<t_LSHIFTEQUAL><<=)|(?P<t_OREQUAL>\\|=)|(?P<t_PLUSEQUAL>\\+=)|(?P<t_RSHIFTEQUAL>>>=)|(?P<t_TIMESEQUAL>\\*=)|(?P<t_XOREQUAL>\\^=)|(?P<t_ANDEQUAL>&=)|(?P<t_ARROW>->)|(?P<t_CONDOP>\\?)|(?P<t_DIVEQUAL>/=)|(?P<t_EQ>==)|(?P<t_GE>>=)|(?P<t_LAND>&&)|(?P<t_LBRACKET>\\[)|(?P<t_LE><=)|(?P<t_LPAREN>\\()|(?P<t_LSHIFT><<)|(?P<t_MINUSEQUAL>-=)|(?P<t_MINUSMINUS>--)|(?P<t_MODEQUAL>%=)|(?P<t_NE>!=)|(?P<t_OR>\\|)|(?P<t_PERIOD>\\.)|(?P<t_PLUS>\\+)|(?P<t_RBRACKET>\\])|(?P<t_RPAREN>\\))|(?P<t_RSHIFT>>>)|(?P<t_TIMES>\\*)|(?P<t_XOR>\\^)|(?P<t_AND>&)|(?P<t_COLON>:)|(?P<t_COMMA>,)|(?P<t_DIVIDE>/)|(?P<t_EQUALS>=)|(?P<t_GT>>)|(?P<t_LNOT>!)|(?P<t_LT><)|(?P<t_MINUS>-)|(?P<t_MOD>%)|(?P<t_NOT>~)|(?P<t_SEMI>;)', [None, ('t_PPHASH', 'PPHASH'), ('t_NEWLINE', 'NEWLINE'), ('t_LBRACE', 'LBRACE'), ('t_RBRACE', 'RBRACE'), ('t_FLOAT_CONST', 'FLOAT_CONST'), None, None, None, None, None, None, None, None, None, ('t_HEX_FLOAT_CONST', 'HEX_FLOAT_CONST'), None, None, None, None, None, None, None, ('t_INT_CONST_HEX', 'INT_CONST_HEX'), None, None, None, None, None, None, None, ('t_INT_CONST_BIN', 'INT_CONST_BIN'), None, None, None, None, None, None, None, ('t_BAD_CONST_OCT', 'BAD_CONST_OCT'), ('t_UNSUPPORTED_C_STYLE_COMMENT', 'UNSUPPORTED_C_STYLE_COMMENT'), ('t_UNSUPPORTED_CXX_STYLE_COMMENT', 'UNSUPPORTED_CXX_STYLE_COMMENT'), ('t_INT_CONST_OCT', 'INT_CONST_OCT'), None, None, None, None, None, None, None, ('t_INT_CONST_DEC', 'INT_CONST_DEC'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_INT_CONST_CHAR', 'INT_CONST_CHAR'), None, None, None, None, None, None, ('t_CHAR_CONST', 'CHAR_CONST'), None, None, None, None, None, None, ('t_WCHAR_CONST', 'WCHAR_CONST'), None, None, None, None, None, None, ('t_U8CHAR_CONST', 'U8CHAR_CONST'), None, None, None, None, None, None, ('t_U16CHAR_CONST', 'U16CHAR_CONST'), None, None, None, None, None, None, ('t_U32CHAR_CONST', 'U32CHAR_CONST'), None, None, None, None, None, None, ('t_UNMATCHED_QUOTE', 'UNMATCHED_QUOTE'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_BAD_CHAR_CONST', 'BAD_CHAR_CONST'), None, None, None, None, None, None, None, None, None, None, ('t_WSTRING_LITERAL', 'WSTRING_LITERAL'), None, None, ('t_U8STRING_LITERAL', 'U8STRING_LITERAL'), None, None, ('t_U16STRING_LITERAL', 'U16STRING_LITERAL'), None, None, ('t_U32STRING_LITERAL', 'U32STRING_LITERAL'), None, None, ('t_BAD_STRING_LITERAL', 'BAD_STRING_LITERAL'), None, None, None, None, None, ('t_ID', 'ID'), (None, 'STRING_LITERAL'), None, None, (None, 'ELLIPSIS'), (None, 'LOR'), (None, 'PLUSPLUS'), (None, 'LSHIFTEQUAL'), (None, 'OREQUAL'), (None, 'PLUSEQUAL'), (None, 'RSHIFTEQUAL'), (None, 'TIMESEQUAL'), (None, 'XOREQUAL'), (None, 'ANDEQUAL'), (None, 'ARROW'), (None, 'CONDOP'), (None, 'DIVEQUAL'), (None, 'EQ'), (None, 'GE'), (None, 'LAND'), (None, 'LBRACKET'), (None, 'LE'), (None, 'LPAREN'), (None, 'LSHIFT'), (None, 'MINUSEQUAL'), (None, 'MINUSMINUS'), (None, 'MODEQUAL'), (None, 'NE'), (None, 'OR'), (None, 'PERIOD'), (None, 'PLUS'), (None, 'RBRACKET'), (None, 'RPAREN'), (None, 'RSHIFT'), (None, 'TIMES'), (None, 'XOR'), (None, 'AND'), (None, 'COLON'), (None, 'COMMA'), (None, 'DIVIDE'), (None, 'EQUALS'), (None, 'GT'), (None, 'LNOT'), (None, 'LT'), (None, 'MINUS'), (None, 'MOD'), (None, 'NOT'), (None, 'SEMI')])], 'ppline': [('(?P<t_ppline_FILENAME>"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P<t_ppline_LINE_NUMBER>(0(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|([1-9][0-9]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?))|(?P<t_ppline_NEWLINE>\\n)|(?P<t_ppline_PPLINE>line)', [None, ('t_ppline_FILENAME', 'FILENAME'), None, None, ('t_ppline_LINE_NUMBER', 'LINE_NUMBER'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_ppline_NEWLINE', 'NEWLINE'), ('t_ppline_PPLINE', 'PPLINE')])], 'pppragma': [('(?P<t_pppragma_NEWLINE>\\n)|(?P<t_pppragma_PPPRAGMA>pragma)|(?P<t_pppragma_STR>.+)', [None, ('t_pppragma_NEWLINE', 'NEWLINE'), ('t_pppragma_PPPRAGMA', 'PPPRAGMA'), ('t_pppragma_STR', 'STR')])]}
+_lexstateignore = {'INITIAL': ' \t', 'ppline': ' \t', 'pppragma': ' \t'}
+_lexstateerrorf = {'INITIAL': 't_error', 'ppline': 't_ppline_error', 'pppragma': 't_pppragma_error'}
+_lexstateeoff = {}
diff --git a/venv/lib/python3.12/site-packages/pycparser/ply/__init__.py b/venv/lib/python3.12/site-packages/pycparser/ply/__init__.py
new file mode 100644
index 00000000..6e53cddc
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pycparser/ply/__init__.py
@@ -0,0 +1,5 @@
+# PLY package
+# Author: David Beazley (dave@dabeaz.com)
+
+__version__ = '3.9'
+__all__ = ['lex','yacc']
diff --git a/venv/lib/python3.12/site-packages/pycparser/ply/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pycparser/ply/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..d955f1f6
Binary files /dev/null and b/venv/lib/python3.12/site-packages/pycparser/ply/__pycache__/__init__.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/pycparser/ply/__pycache__/cpp.cpython-312.pyc b/venv/lib/python3.12/site-packages/pycparser/ply/__pycache__/cpp.cpython-312.pyc
new file mode 100644
index 00000000..49c9843f
Binary files /dev/null and b/venv/lib/python3.12/site-packages/pycparser/ply/__pycache__/cpp.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/pycparser/ply/__pycache__/ctokens.cpython-312.pyc b/venv/lib/python3.12/site-packages/pycparser/ply/__pycache__/ctokens.cpython-312.pyc
new file mode 100644
index 00000000..1f30d442
Binary files /dev/null and b/venv/lib/python3.12/site-packages/pycparser/ply/__pycache__/ctokens.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/pycparser/ply/__pycache__/lex.cpython-312.pyc b/venv/lib/python3.12/site-packages/pycparser/ply/__pycache__/lex.cpython-312.pyc
new file mode 100644
index 00000000..00fb9f3d
Binary files /dev/null and b/venv/lib/python3.12/site-packages/pycparser/ply/__pycache__/lex.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/pycparser/ply/__pycache__/yacc.cpython-312.pyc b/venv/lib/python3.12/site-packages/pycparser/ply/__pycache__/yacc.cpython-312.pyc
new file mode 100644
index 00000000..63395bb3
Binary files /dev/null and b/venv/lib/python3.12/site-packages/pycparser/ply/__pycache__/yacc.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/pycparser/ply/__pycache__/ygen.cpython-312.pyc b/venv/lib/python3.12/site-packages/pycparser/ply/__pycache__/ygen.cpython-312.pyc
new file mode 100644
index 00000000..a3412dc2
Binary files /dev/null and b/venv/lib/python3.12/site-packages/pycparser/ply/__pycache__/ygen.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/pycparser/ply/cpp.py b/venv/lib/python3.12/site-packages/pycparser/ply/cpp.py
new file mode 100644
index 00000000..86273eac
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pycparser/ply/cpp.py
@@ -0,0 +1,905 @@
+# -----------------------------------------------------------------------------
+# cpp.py
+#
+# Author: David Beazley (http://www.dabeaz.com)
+# Copyright (C) 2017
+# All rights reserved
+#
+# This module implements an ANSI-C style lexical preprocessor for PLY.
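+# (Illustrative usage, mirroring the __main__ block at the bottom of this
+#  file: p = Preprocessor(lexer); p.parse(text, "file.c"); then call
+#  p.token() repeatedly until it returns None.)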
+# -----------------------------------------------------------------------------
+import sys
+
+# Python 2/3 compatibility shims
+if sys.version_info.major < 3:
+    STRING_TYPES = (str, unicode)
+else:
+    STRING_TYPES = str
+    xrange = range
+
+# -----------------------------------------------------------------------------
+# Default preprocessor lexer definitions. These tokens are enough to get
+# a basic preprocessor working. Other modules may import these if they want.
+# -----------------------------------------------------------------------------
+
+tokens = (
+   'CPP_ID','CPP_INTEGER', 'CPP_FLOAT', 'CPP_STRING', 'CPP_CHAR', 'CPP_WS', 'CPP_COMMENT1', 'CPP_COMMENT2', 'CPP_POUND','CPP_DPOUND'
+)
+
+literals = "+-*/%|&~^<>=!?()[]{}.,;:\\\'\""
+
+# Whitespace
+def t_CPP_WS(t):
+    r'\s+'
+    t.lexer.lineno += t.value.count("\n")
+    return t
+
+t_CPP_POUND = r'\#'
+t_CPP_DPOUND = r'\#\#'
+
+# Identifier
+t_CPP_ID = r'[A-Za-z_][\w_]*'
+
+# Integer literal
+def CPP_INTEGER(t):
+    r'(((((0x)|(0X))[0-9a-fA-F]+)|(\d+))([uU][lL]|[lL][uU]|[uU]|[lL])?)'
+    return t
+
+t_CPP_INTEGER = CPP_INTEGER
+
+# Floating literal
+t_CPP_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'
+
+# String literal
+def t_CPP_STRING(t):
+    r'\"([^\\\n]|(\\(.|\n)))*?\"'
+    t.lexer.lineno += t.value.count("\n")
+    return t
+
+# Character constant 'c' or L'c'
+def t_CPP_CHAR(t):
+    r'(L)?\'([^\\\n]|(\\(.|\n)))*?\''
+    t.lexer.lineno += t.value.count("\n")
+    return t
+
+# Comment
+def t_CPP_COMMENT1(t):
+    r'(/\*(.|\n)*?\*/)'
+    ncr = t.value.count("\n")
+    t.lexer.lineno += ncr
+    # replace with one space or a number of '\n'
+    t.type = 'CPP_WS'; t.value = '\n' * ncr if ncr else ' '
+    return t
+
+# Line comment
+def t_CPP_COMMENT2(t):
+    r'(//.*?(\n|$))'
+    # replace with '\n'
+    t.type = 'CPP_WS'; t.value = '\n'
+    return t
+
+def t_error(t):
+    t.type = t.value[0]
+    t.value = t.value[0]
+    t.lexer.skip(1)
+    return t
+
+import re
+import copy
+import time
+import os.path
+
+# -----------------------------------------------------------------------------
+# trigraph()
+#
+# Given an input string, this function replaces all trigraph sequences.
+# The following mapping is used:
+#
+#     ??=    #
+#     ??/    \
+#     ??'    ^
+#     ??(    [
+#     ??)    ]
+#     ??!
| +# ??< { +# ??> } +# ??- ~ +# ----------------------------------------------------------------------------- + +_trigraph_pat = re.compile(r'''\?\?[=/\'\(\)\!<>\-]''') +_trigraph_rep = { + '=':'#', + '/':'\\', + "'":'^', + '(':'[', + ')':']', + '!':'|', + '<':'{', + '>':'}', + '-':'~' +} + +def trigraph(input): + return _trigraph_pat.sub(lambda g: _trigraph_rep[g.group()[-1]],input) + +# ------------------------------------------------------------------ +# Macro object +# +# This object holds information about preprocessor macros +# +# .name - Macro name (string) +# .value - Macro value (a list of tokens) +# .arglist - List of argument names +# .variadic - Boolean indicating whether or not variadic macro +# .vararg - Name of the variadic parameter +# +# When a macro is created, the macro replacement token sequence is +# pre-scanned and used to create patch lists that are later used +# during macro expansion +# ------------------------------------------------------------------ + +class Macro(object): + def __init__(self,name,value,arglist=None,variadic=False): + self.name = name + self.value = value + self.arglist = arglist + self.variadic = variadic + if variadic: + self.vararg = arglist[-1] + self.source = None + +# ------------------------------------------------------------------ +# Preprocessor object +# +# Object representing a preprocessor. Contains macro definitions, +# include directories, and other information +# ------------------------------------------------------------------ + +class Preprocessor(object): + def __init__(self,lexer=None): + if lexer is None: + lexer = lex.lexer + self.lexer = lexer + self.macros = { } + self.path = [] + self.temp_path = [] + + # Probe the lexer for selected tokens + self.lexprobe() + + tm = time.localtime() + self.define("__DATE__ \"%s\"" % time.strftime("%b %d %Y",tm)) + self.define("__TIME__ \"%s\"" % time.strftime("%H:%M:%S",tm)) + self.parser = None + + # ----------------------------------------------------------------------------- + # tokenize() + # + # Utility function. Given a string of text, tokenize into a list of tokens + # ----------------------------------------------------------------------------- + + def tokenize(self,text): + tokens = [] + self.lexer.input(text) + while True: + tok = self.lexer.token() + if not tok: break + tokens.append(tok) + return tokens + + # --------------------------------------------------------------------- + # error() + # + # Report a preprocessor error/warning of some kind + # ---------------------------------------------------------------------- + + def error(self,file,line,msg): + print("%s:%d %s" % (file,line,msg)) + + # ---------------------------------------------------------------------- + # lexprobe() + # + # This method probes the preprocessor lexer object to discover + # the token types of symbols that are important to the preprocessor. + # If this works right, the preprocessor will simply "work" + # with any suitable lexer regardless of how tokens have been named. 
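+    # (Illustrative: probed against the default lexer defined at the top of
+    #  this module, this sets self.t_ID = 'CPP_ID',
+    #  self.t_INTEGER = 'CPP_INTEGER', self.t_STRING = 'CPP_STRING', etc.)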
+ # ---------------------------------------------------------------------- + + def lexprobe(self): + + # Determine the token type for identifiers + self.lexer.input("identifier") + tok = self.lexer.token() + if not tok or tok.value != "identifier": + print("Couldn't determine identifier type") + else: + self.t_ID = tok.type + + # Determine the token type for integers + self.lexer.input("12345") + tok = self.lexer.token() + if not tok or int(tok.value) != 12345: + print("Couldn't determine integer type") + else: + self.t_INTEGER = tok.type + self.t_INTEGER_TYPE = type(tok.value) + + # Determine the token type for strings enclosed in double quotes + self.lexer.input("\"filename\"") + tok = self.lexer.token() + if not tok or tok.value != "\"filename\"": + print("Couldn't determine string type") + else: + self.t_STRING = tok.type + + # Determine the token type for whitespace--if any + self.lexer.input(" ") + tok = self.lexer.token() + if not tok or tok.value != " ": + self.t_SPACE = None + else: + self.t_SPACE = tok.type + + # Determine the token type for newlines + self.lexer.input("\n") + tok = self.lexer.token() + if not tok or tok.value != "\n": + self.t_NEWLINE = None + print("Couldn't determine token for newlines") + else: + self.t_NEWLINE = tok.type + + self.t_WS = (self.t_SPACE, self.t_NEWLINE) + + # Check for other characters used by the preprocessor + chars = [ '<','>','#','##','\\','(',')',',','.'] + for c in chars: + self.lexer.input(c) + tok = self.lexer.token() + if not tok or tok.value != c: + print("Unable to lex '%s' required for preprocessor" % c) + + # ---------------------------------------------------------------------- + # add_path() + # + # Adds a search path to the preprocessor. + # ---------------------------------------------------------------------- + + def add_path(self,path): + self.path.append(path) + + # ---------------------------------------------------------------------- + # group_lines() + # + # Given an input string, this function splits it into lines. Trailing whitespace + # is removed. Any line ending with \ is grouped with the next line. This + # function forms the lowest level of the preprocessor---grouping into text into + # a line-by-line format. + # ---------------------------------------------------------------------- + + def group_lines(self,input): + lex = self.lexer.clone() + lines = [x.rstrip() for x in input.splitlines()] + for i in xrange(len(lines)): + j = i+1 + while lines[i].endswith('\\') and (j < len(lines)): + lines[i] = lines[i][:-1]+lines[j] + lines[j] = "" + j += 1 + + input = "\n".join(lines) + lex.input(input) + lex.lineno = 1 + + current_line = [] + while True: + tok = lex.token() + if not tok: + break + current_line.append(tok) + if tok.type in self.t_WS and '\n' in tok.value: + yield current_line + current_line = [] + + if current_line: + yield current_line + + # ---------------------------------------------------------------------- + # tokenstrip() + # + # Remove leading/trailing whitespace tokens from a token list + # ---------------------------------------------------------------------- + + def tokenstrip(self,tokens): + i = 0 + while i < len(tokens) and tokens[i].type in self.t_WS: + i += 1 + del tokens[:i] + i = len(tokens)-1 + while i >= 0 and tokens[i].type in self.t_WS: + i -= 1 + del tokens[i+1:] + return tokens + + + # ---------------------------------------------------------------------- + # collect_args() + # + # Collects comma separated arguments from a list of tokens. 
The arguments + # must be enclosed in parenthesis. Returns a tuple (tokencount,args,positions) + # where tokencount is the number of tokens consumed, args is a list of arguments, + # and positions is a list of integers containing the starting index of each + # argument. Each argument is represented by a list of tokens. + # + # When collecting arguments, leading and trailing whitespace is removed + # from each argument. + # + # This function properly handles nested parenthesis and commas---these do not + # define new arguments. + # ---------------------------------------------------------------------- + + def collect_args(self,tokenlist): + args = [] + positions = [] + current_arg = [] + nesting = 1 + tokenlen = len(tokenlist) + + # Search for the opening '('. + i = 0 + while (i < tokenlen) and (tokenlist[i].type in self.t_WS): + i += 1 + + if (i < tokenlen) and (tokenlist[i].value == '('): + positions.append(i+1) + else: + self.error(self.source,tokenlist[0].lineno,"Missing '(' in macro arguments") + return 0, [], [] + + i += 1 + + while i < tokenlen: + t = tokenlist[i] + if t.value == '(': + current_arg.append(t) + nesting += 1 + elif t.value == ')': + nesting -= 1 + if nesting == 0: + if current_arg: + args.append(self.tokenstrip(current_arg)) + positions.append(i) + return i+1,args,positions + current_arg.append(t) + elif t.value == ',' and nesting == 1: + args.append(self.tokenstrip(current_arg)) + positions.append(i+1) + current_arg = [] + else: + current_arg.append(t) + i += 1 + + # Missing end argument + self.error(self.source,tokenlist[-1].lineno,"Missing ')' in macro arguments") + return 0, [],[] + + # ---------------------------------------------------------------------- + # macro_prescan() + # + # Examine the macro value (token sequence) and identify patch points + # This is used to speed up macro expansion later on---we'll know + # right away where to apply patches to the value to form the expansion + # ---------------------------------------------------------------------- + + def macro_prescan(self,macro): + macro.patch = [] # Standard macro arguments + macro.str_patch = [] # String conversion expansion + macro.var_comma_patch = [] # Variadic macro comma patch + i = 0 + while i < len(macro.value): + if macro.value[i].type == self.t_ID and macro.value[i].value in macro.arglist: + argnum = macro.arglist.index(macro.value[i].value) + # Conversion of argument to a string + if i > 0 and macro.value[i-1].value == '#': + macro.value[i] = copy.copy(macro.value[i]) + macro.value[i].type = self.t_STRING + del macro.value[i-1] + macro.str_patch.append((argnum,i-1)) + continue + # Concatenation + elif (i > 0 and macro.value[i-1].value == '##'): + macro.patch.append(('c',argnum,i-1)) + del macro.value[i-1] + continue + elif ((i+1) < len(macro.value) and macro.value[i+1].value == '##'): + macro.patch.append(('c',argnum,i)) + i += 1 + continue + # Standard expansion + else: + macro.patch.append(('e',argnum,i)) + elif macro.value[i].value == '##': + if macro.variadic and (i > 0) and (macro.value[i-1].value == ',') and \ + ((i+1) < len(macro.value)) and (macro.value[i+1].type == self.t_ID) and \ + (macro.value[i+1].value == macro.vararg): + macro.var_comma_patch.append(i-1) + i += 1 + macro.patch.sort(key=lambda x: x[2],reverse=True) + + # ---------------------------------------------------------------------- + # macro_expand_args() + # + # Given a Macro and list of arguments (each a token list), this method + # returns an expanded version of a macro. 
The return value is a token sequence + # representing the replacement macro tokens + # ---------------------------------------------------------------------- + + def macro_expand_args(self,macro,args): + # Make a copy of the macro token sequence + rep = [copy.copy(_x) for _x in macro.value] + + # Make string expansion patches. These do not alter the length of the replacement sequence + + str_expansion = {} + for argnum, i in macro.str_patch: + if argnum not in str_expansion: + str_expansion[argnum] = ('"%s"' % "".join([x.value for x in args[argnum]])).replace("\\","\\\\") + rep[i] = copy.copy(rep[i]) + rep[i].value = str_expansion[argnum] + + # Make the variadic macro comma patch. If the variadic macro argument is empty, we get rid + comma_patch = False + if macro.variadic and not args[-1]: + for i in macro.var_comma_patch: + rep[i] = None + comma_patch = True + + # Make all other patches. The order of these matters. It is assumed that the patch list + # has been sorted in reverse order of patch location since replacements will cause the + # size of the replacement sequence to expand from the patch point. + + expanded = { } + for ptype, argnum, i in macro.patch: + # Concatenation. Argument is left unexpanded + if ptype == 'c': + rep[i:i+1] = args[argnum] + # Normal expansion. Argument is macro expanded first + elif ptype == 'e': + if argnum not in expanded: + expanded[argnum] = self.expand_macros(args[argnum]) + rep[i:i+1] = expanded[argnum] + + # Get rid of removed comma if necessary + if comma_patch: + rep = [_i for _i in rep if _i] + + return rep + + + # ---------------------------------------------------------------------- + # expand_macros() + # + # Given a list of tokens, this function performs macro expansion. + # The expanded argument is a dictionary that contains macros already + # expanded. This is used to prevent infinite recursion. 
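+    # (Illustrative: with `#define A B` and `#define B A`, expanding `A`
+    #  first records A in `expanded`, so when B's replacement produces A
+    #  again it is left alone and the mutual recursion terminates.)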
+ # ---------------------------------------------------------------------- + + def expand_macros(self,tokens,expanded=None): + if expanded is None: + expanded = {} + i = 0 + while i < len(tokens): + t = tokens[i] + if t.type == self.t_ID: + if t.value in self.macros and t.value not in expanded: + # Yes, we found a macro match + expanded[t.value] = True + + m = self.macros[t.value] + if not m.arglist: + # A simple macro + ex = self.expand_macros([copy.copy(_x) for _x in m.value],expanded) + for e in ex: + e.lineno = t.lineno + tokens[i:i+1] = ex + i += len(ex) + else: + # A macro with arguments + j = i + 1 + while j < len(tokens) and tokens[j].type in self.t_WS: + j += 1 + if tokens[j].value == '(': + tokcount,args,positions = self.collect_args(tokens[j:]) + if not m.variadic and len(args) != len(m.arglist): + self.error(self.source,t.lineno,"Macro %s requires %d arguments" % (t.value,len(m.arglist))) + i = j + tokcount + elif m.variadic and len(args) < len(m.arglist)-1: + if len(m.arglist) > 2: + self.error(self.source,t.lineno,"Macro %s must have at least %d arguments" % (t.value, len(m.arglist)-1)) + else: + self.error(self.source,t.lineno,"Macro %s must have at least %d argument" % (t.value, len(m.arglist)-1)) + i = j + tokcount + else: + if m.variadic: + if len(args) == len(m.arglist)-1: + args.append([]) + else: + args[len(m.arglist)-1] = tokens[j+positions[len(m.arglist)-1]:j+tokcount-1] + del args[len(m.arglist):] + + # Get macro replacement text + rep = self.macro_expand_args(m,args) + rep = self.expand_macros(rep,expanded) + for r in rep: + r.lineno = t.lineno + tokens[i:j+tokcount] = rep + i += len(rep) + del expanded[t.value] + continue + elif t.value == '__LINE__': + t.type = self.t_INTEGER + t.value = self.t_INTEGER_TYPE(t.lineno) + + i += 1 + return tokens + + # ---------------------------------------------------------------------- + # evalexpr() + # + # Evaluate an expression token sequence for the purposes of evaluating + # integral expressions. 
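+    # (Illustrative: for `#if defined(FOO) && VERSION > 2`, `defined(FOO)`
+    #  is folded to 1L/0L first, identifiers still left after macro
+    #  expansion become 0L, then &&/||/! are rewritten to and/or/not and
+    #  the resulting string is eval()'d as a Python expression.)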
+ # ---------------------------------------------------------------------- + + def evalexpr(self,tokens): + # tokens = tokenize(line) + # Search for defined macros + i = 0 + while i < len(tokens): + if tokens[i].type == self.t_ID and tokens[i].value == 'defined': + j = i + 1 + needparen = False + result = "0L" + while j < len(tokens): + if tokens[j].type in self.t_WS: + j += 1 + continue + elif tokens[j].type == self.t_ID: + if tokens[j].value in self.macros: + result = "1L" + else: + result = "0L" + if not needparen: break + elif tokens[j].value == '(': + needparen = True + elif tokens[j].value == ')': + break + else: + self.error(self.source,tokens[i].lineno,"Malformed defined()") + j += 1 + tokens[i].type = self.t_INTEGER + tokens[i].value = self.t_INTEGER_TYPE(result) + del tokens[i+1:j+1] + i += 1 + tokens = self.expand_macros(tokens) + for i,t in enumerate(tokens): + if t.type == self.t_ID: + tokens[i] = copy.copy(t) + tokens[i].type = self.t_INTEGER + tokens[i].value = self.t_INTEGER_TYPE("0L") + elif t.type == self.t_INTEGER: + tokens[i] = copy.copy(t) + # Strip off any trailing suffixes + tokens[i].value = str(tokens[i].value) + while tokens[i].value[-1] not in "0123456789abcdefABCDEF": + tokens[i].value = tokens[i].value[:-1] + + expr = "".join([str(x.value) for x in tokens]) + expr = expr.replace("&&"," and ") + expr = expr.replace("||"," or ") + expr = expr.replace("!"," not ") + try: + result = eval(expr) + except Exception: + self.error(self.source,tokens[0].lineno,"Couldn't evaluate expression") + result = 0 + return result + + # ---------------------------------------------------------------------- + # parsegen() + # + # Parse an input string/ + # ---------------------------------------------------------------------- + def parsegen(self,input,source=None): + + # Replace trigraph sequences + t = trigraph(input) + lines = self.group_lines(t) + + if not source: + source = "" + + self.define("__FILE__ \"%s\"" % source) + + self.source = source + chunk = [] + enable = True + iftrigger = False + ifstack = [] + + for x in lines: + for i,tok in enumerate(x): + if tok.type not in self.t_WS: break + if tok.value == '#': + # Preprocessor directive + + # insert necessary whitespace instead of eaten tokens + for tok in x: + if tok.type in self.t_WS and '\n' in tok.value: + chunk.append(tok) + + dirtokens = self.tokenstrip(x[i+1:]) + if dirtokens: + name = dirtokens[0].value + args = self.tokenstrip(dirtokens[1:]) + else: + name = "" + args = [] + + if name == 'define': + if enable: + for tok in self.expand_macros(chunk): + yield tok + chunk = [] + self.define(args) + elif name == 'include': + if enable: + for tok in self.expand_macros(chunk): + yield tok + chunk = [] + oldfile = self.macros['__FILE__'] + for tok in self.include(args): + yield tok + self.macros['__FILE__'] = oldfile + self.source = source + elif name == 'undef': + if enable: + for tok in self.expand_macros(chunk): + yield tok + chunk = [] + self.undef(args) + elif name == 'ifdef': + ifstack.append((enable,iftrigger)) + if enable: + if not args[0].value in self.macros: + enable = False + iftrigger = False + else: + iftrigger = True + elif name == 'ifndef': + ifstack.append((enable,iftrigger)) + if enable: + if args[0].value in self.macros: + enable = False + iftrigger = False + else: + iftrigger = True + elif name == 'if': + ifstack.append((enable,iftrigger)) + if enable: + result = self.evalexpr(args) + if not result: + enable = False + iftrigger = False + else: + iftrigger = True + elif name == 'elif': + if ifstack: + 
if ifstack[-1][0]: # We only pay attention if outer "if" allows this + if enable: # If already true, we flip enable False + enable = False + elif not iftrigger: # If False, but not triggered yet, we'll check expression + result = self.evalexpr(args) + if result: + enable = True + iftrigger = True + else: + self.error(self.source,dirtokens[0].lineno,"Misplaced #elif") + + elif name == 'else': + if ifstack: + if ifstack[-1][0]: + if enable: + enable = False + elif not iftrigger: + enable = True + iftrigger = True + else: + self.error(self.source,dirtokens[0].lineno,"Misplaced #else") + + elif name == 'endif': + if ifstack: + enable,iftrigger = ifstack.pop() + else: + self.error(self.source,dirtokens[0].lineno,"Misplaced #endif") + else: + # Unknown preprocessor directive + pass + + else: + # Normal text + if enable: + chunk.extend(x) + + for tok in self.expand_macros(chunk): + yield tok + chunk = [] + + # ---------------------------------------------------------------------- + # include() + # + # Implementation of file-inclusion + # ---------------------------------------------------------------------- + + def include(self,tokens): + # Try to extract the filename and then process an include file + if not tokens: + return + if tokens: + if tokens[0].value != '<' and tokens[0].type != self.t_STRING: + tokens = self.expand_macros(tokens) + + if tokens[0].value == '<': + # Include <...> + i = 1 + while i < len(tokens): + if tokens[i].value == '>': + break + i += 1 + else: + print("Malformed #include <...>") + return + filename = "".join([x.value for x in tokens[1:i]]) + path = self.path + [""] + self.temp_path + elif tokens[0].type == self.t_STRING: + filename = tokens[0].value[1:-1] + path = self.temp_path + [""] + self.path + else: + print("Malformed #include statement") + return + for p in path: + iname = os.path.join(p,filename) + try: + data = open(iname,"r").read() + dname = os.path.dirname(iname) + if dname: + self.temp_path.insert(0,dname) + for tok in self.parsegen(data,filename): + yield tok + if dname: + del self.temp_path[0] + break + except IOError: + pass + else: + print("Couldn't find '%s'" % filename) + + # ---------------------------------------------------------------------- + # define() + # + # Define a new macro + # ---------------------------------------------------------------------- + + def define(self,tokens): + if isinstance(tokens,STRING_TYPES): + tokens = self.tokenize(tokens) + + linetok = tokens + try: + name = linetok[0] + if len(linetok) > 1: + mtype = linetok[1] + else: + mtype = None + if not mtype: + m = Macro(name.value,[]) + self.macros[name.value] = m + elif mtype.type in self.t_WS: + # A normal macro + m = Macro(name.value,self.tokenstrip(linetok[2:])) + self.macros[name.value] = m + elif mtype.value == '(': + # A macro with arguments + tokcount, args, positions = self.collect_args(linetok[1:]) + variadic = False + for a in args: + if variadic: + print("No more arguments may follow a variadic argument") + break + astr = "".join([str(_i.value) for _i in a]) + if astr == "...": + variadic = True + a[0].type = self.t_ID + a[0].value = '__VA_ARGS__' + variadic = True + del a[1:] + continue + elif astr[-3:] == "..." and a[0].type == self.t_ID: + variadic = True + del a[1:] + # If, for some reason, "." 
is part of the identifier, strip off the name for the purposes + # of macro expansion + if a[0].value[-3:] == '...': + a[0].value = a[0].value[:-3] + continue + if len(a) > 1 or a[0].type != self.t_ID: + print("Invalid macro argument") + break + else: + mvalue = self.tokenstrip(linetok[1+tokcount:]) + i = 0 + while i < len(mvalue): + if i+1 < len(mvalue): + if mvalue[i].type in self.t_WS and mvalue[i+1].value == '##': + del mvalue[i] + continue + elif mvalue[i].value == '##' and mvalue[i+1].type in self.t_WS: + del mvalue[i+1] + i += 1 + m = Macro(name.value,mvalue,[x[0].value for x in args],variadic) + self.macro_prescan(m) + self.macros[name.value] = m + else: + print("Bad macro definition") + except LookupError: + print("Bad macro definition") + + # ---------------------------------------------------------------------- + # undef() + # + # Undefine a macro + # ---------------------------------------------------------------------- + + def undef(self,tokens): + id = tokens[0].value + try: + del self.macros[id] + except LookupError: + pass + + # ---------------------------------------------------------------------- + # parse() + # + # Parse input text. + # ---------------------------------------------------------------------- + def parse(self,input,source=None,ignore={}): + self.ignore = ignore + self.parser = self.parsegen(input,source) + + # ---------------------------------------------------------------------- + # token() + # + # Method to return individual tokens + # ---------------------------------------------------------------------- + def token(self): + try: + while True: + tok = next(self.parser) + if tok.type not in self.ignore: return tok + except StopIteration: + self.parser = None + return None + +if __name__ == '__main__': + import ply.lex as lex + lexer = lex.lex() + + # Run a preprocessor + import sys + f = open(sys.argv[1]) + input = f.read() + + p = Preprocessor(lexer) + p.parse(input,sys.argv[1]) + while True: + tok = p.token() + if not tok: break + print(p.source, tok) diff --git a/venv/lib/python3.12/site-packages/pycparser/ply/ctokens.py b/venv/lib/python3.12/site-packages/pycparser/ply/ctokens.py new file mode 100644 index 00000000..f6f6952d --- /dev/null +++ b/venv/lib/python3.12/site-packages/pycparser/ply/ctokens.py @@ -0,0 +1,133 @@ +# ---------------------------------------------------------------------- +# ctokens.py +# +# Token specifications for symbols in ANSI C and C++. This file is +# meant to be used as a library in other tokenizers. +# ---------------------------------------------------------------------- + +# Reserved words + +tokens = [ + # Literals (identifier, integer constant, float constant, string constant, char const) + 'ID', 'TYPEID', 'INTEGER', 'FLOAT', 'STRING', 'CHARACTER', + + # Operators (+,-,*,/,%,|,&,~,^,<<,>>, ||, &&, !, <, <=, >, >=, ==, !=) + 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MODULO', + 'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT', + 'LOR', 'LAND', 'LNOT', + 'LT', 'LE', 'GT', 'GE', 'EQ', 'NE', + + # Assignment (=, *=, /=, %=, +=, -=, <<=, >>=, &=, ^=, |=) + 'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', 'PLUSEQUAL', 'MINUSEQUAL', + 'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', 'OREQUAL', + + # Increment/decrement (++,--) + 'INCREMENT', 'DECREMENT', + + # Structure dereference (->) + 'ARROW', + + # Ternary operator (?) + 'TERNARY', + + # Delimeters ( ) [ ] { } , . ; : + 'LPAREN', 'RPAREN', + 'LBRACKET', 'RBRACKET', + 'LBRACE', 'RBRACE', + 'COMMA', 'PERIOD', 'SEMI', 'COLON', + + # Ellipsis (...) 
+ 'ELLIPSIS', +] + +# Operators +t_PLUS = r'\+' +t_MINUS = r'-' +t_TIMES = r'\*' +t_DIVIDE = r'/' +t_MODULO = r'%' +t_OR = r'\|' +t_AND = r'&' +t_NOT = r'~' +t_XOR = r'\^' +t_LSHIFT = r'<<' +t_RSHIFT = r'>>' +t_LOR = r'\|\|' +t_LAND = r'&&' +t_LNOT = r'!' +t_LT = r'<' +t_GT = r'>' +t_LE = r'<=' +t_GE = r'>=' +t_EQ = r'==' +t_NE = r'!=' + +# Assignment operators + +t_EQUALS = r'=' +t_TIMESEQUAL = r'\*=' +t_DIVEQUAL = r'/=' +t_MODEQUAL = r'%=' +t_PLUSEQUAL = r'\+=' +t_MINUSEQUAL = r'-=' +t_LSHIFTEQUAL = r'<<=' +t_RSHIFTEQUAL = r'>>=' +t_ANDEQUAL = r'&=' +t_OREQUAL = r'\|=' +t_XOREQUAL = r'\^=' + +# Increment/decrement +t_INCREMENT = r'\+\+' +t_DECREMENT = r'--' + +# -> +t_ARROW = r'->' + +# ? +t_TERNARY = r'\?' + +# Delimeters +t_LPAREN = r'\(' +t_RPAREN = r'\)' +t_LBRACKET = r'\[' +t_RBRACKET = r'\]' +t_LBRACE = r'\{' +t_RBRACE = r'\}' +t_COMMA = r',' +t_PERIOD = r'\.' +t_SEMI = r';' +t_COLON = r':' +t_ELLIPSIS = r'\.\.\.' + +# Identifiers +t_ID = r'[A-Za-z_][A-Za-z0-9_]*' + +# Integer literal +t_INTEGER = r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?' + +# Floating literal +t_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?' + +# String literal +t_STRING = r'\"([^\\\n]|(\\.))*?\"' + +# Character constant 'c' or L'c' +t_CHARACTER = r'(L)?\'([^\\\n]|(\\.))*?\'' + +# Comment (C-Style) +def t_COMMENT(t): + r'/\*(.|\n)*?\*/' + t.lexer.lineno += t.value.count('\n') + return t + +# Comment (C++-Style) +def t_CPPCOMMENT(t): + r'//.*\n' + t.lexer.lineno += 1 + return t + + + + + + diff --git a/venv/lib/python3.12/site-packages/pycparser/ply/lex.py b/venv/lib/python3.12/site-packages/pycparser/ply/lex.py new file mode 100644 index 00000000..dfc51394 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pycparser/ply/lex.py @@ -0,0 +1,1099 @@ +# ----------------------------------------------------------------------------- +# ply: lex.py +# +# Copyright (C) 2001-2017 +# David M. Beazley (Dabeaz LLC) +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# * Neither the name of the David Beazley or Dabeaz LLC may be used to +# endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+# ----------------------------------------------------------------------------- + +__version__ = '3.10' +__tabversion__ = '3.10' + +import re +import sys +import types +import copy +import os +import inspect + +# This tuple contains known string types +try: + # Python 2.6 + StringTypes = (types.StringType, types.UnicodeType) +except AttributeError: + # Python 3.0 + StringTypes = (str, bytes) + +# This regular expression is used to match valid token names +_is_identifier = re.compile(r'^[a-zA-Z0-9_]+$') + +# Exception thrown when invalid token encountered and no default error +# handler is defined. +class LexError(Exception): + def __init__(self, message, s): + self.args = (message,) + self.text = s + + +# Token class. This class is used to represent the tokens produced. +class LexToken(object): + def __str__(self): + return 'LexToken(%s,%r,%d,%d)' % (self.type, self.value, self.lineno, self.lexpos) + + def __repr__(self): + return str(self) + + +# This object is a stand-in for a logging object created by the +# logging module. + +class PlyLogger(object): + def __init__(self, f): + self.f = f + + def critical(self, msg, *args, **kwargs): + self.f.write((msg % args) + '\n') + + def warning(self, msg, *args, **kwargs): + self.f.write('WARNING: ' + (msg % args) + '\n') + + def error(self, msg, *args, **kwargs): + self.f.write('ERROR: ' + (msg % args) + '\n') + + info = critical + debug = critical + + +# Null logger is used when no output is generated. Does nothing. +class NullLogger(object): + def __getattribute__(self, name): + return self + + def __call__(self, *args, **kwargs): + return self + + +# ----------------------------------------------------------------------------- +# === Lexing Engine === +# +# The following Lexer class implements the lexer runtime. There are only +# a few public methods and attributes: +# +# input() - Store a new string in the lexer +# token() - Get the next token +# clone() - Clone the lexer +# +# lineno - Current line number +# lexpos - Current position in the input string +# ----------------------------------------------------------------------------- + +class Lexer: + def __init__(self): + self.lexre = None # Master regular expression. 
This is a list of + # tuples (re, findex) where re is a compiled + # regular expression and findex is a list + # mapping regex group numbers to rules + self.lexretext = None # Current regular expression strings + self.lexstatere = {} # Dictionary mapping lexer states to master regexs + self.lexstateretext = {} # Dictionary mapping lexer states to regex strings + self.lexstaterenames = {} # Dictionary mapping lexer states to symbol names + self.lexstate = 'INITIAL' # Current lexer state + self.lexstatestack = [] # Stack of lexer states + self.lexstateinfo = None # State information + self.lexstateignore = {} # Dictionary of ignored characters for each state + self.lexstateerrorf = {} # Dictionary of error functions for each state + self.lexstateeoff = {} # Dictionary of eof functions for each state + self.lexreflags = 0 # Optional re compile flags + self.lexdata = None # Actual input data (as a string) + self.lexpos = 0 # Current position in input text + self.lexlen = 0 # Length of the input text + self.lexerrorf = None # Error rule (if any) + self.lexeoff = None # EOF rule (if any) + self.lextokens = None # List of valid tokens + self.lexignore = '' # Ignored characters + self.lexliterals = '' # Literal characters that can be passed through + self.lexmodule = None # Module + self.lineno = 1 # Current line number + self.lexoptimize = False # Optimized mode + + def clone(self, object=None): + c = copy.copy(self) + + # If the object parameter has been supplied, it means we are attaching the + # lexer to a new object. In this case, we have to rebind all methods in + # the lexstatere and lexstateerrorf tables. + + if object: + newtab = {} + for key, ritem in self.lexstatere.items(): + newre = [] + for cre, findex in ritem: + newfindex = [] + for f in findex: + if not f or not f[0]: + newfindex.append(f) + continue + newfindex.append((getattr(object, f[0].__name__), f[1])) + newre.append((cre, newfindex)) + newtab[key] = newre + c.lexstatere = newtab + c.lexstateerrorf = {} + for key, ef in self.lexstateerrorf.items(): + c.lexstateerrorf[key] = getattr(object, ef.__name__) + c.lexmodule = object + return c + + # ------------------------------------------------------------ + # writetab() - Write lexer information to a table file + # ------------------------------------------------------------ + def writetab(self, lextab, outputdir=''): + if isinstance(lextab, types.ModuleType): + raise IOError("Won't overwrite existing lextab module") + basetabmodule = lextab.split('.')[-1] + filename = os.path.join(outputdir, basetabmodule) + '.py' + with open(filename, 'w') as tf: + tf.write('# %s.py. This file automatically created by PLY (version %s). 
Don\'t edit!\n' % (basetabmodule, __version__)) + tf.write('_tabversion = %s\n' % repr(__tabversion__)) + tf.write('_lextokens = set(%s)\n' % repr(tuple(sorted(self.lextokens)))) + tf.write('_lexreflags = %s\n' % repr(self.lexreflags)) + tf.write('_lexliterals = %s\n' % repr(self.lexliterals)) + tf.write('_lexstateinfo = %s\n' % repr(self.lexstateinfo)) + + # Rewrite the lexstatere table, replacing function objects with function names + tabre = {} + for statename, lre in self.lexstatere.items(): + titem = [] + for (pat, func), retext, renames in zip(lre, self.lexstateretext[statename], self.lexstaterenames[statename]): + titem.append((retext, _funcs_to_names(func, renames))) + tabre[statename] = titem + + tf.write('_lexstatere = %s\n' % repr(tabre)) + tf.write('_lexstateignore = %s\n' % repr(self.lexstateignore)) + + taberr = {} + for statename, ef in self.lexstateerrorf.items(): + taberr[statename] = ef.__name__ if ef else None + tf.write('_lexstateerrorf = %s\n' % repr(taberr)) + + tabeof = {} + for statename, ef in self.lexstateeoff.items(): + tabeof[statename] = ef.__name__ if ef else None + tf.write('_lexstateeoff = %s\n' % repr(tabeof)) + + # ------------------------------------------------------------ + # readtab() - Read lexer information from a tab file + # ------------------------------------------------------------ + def readtab(self, tabfile, fdict): + if isinstance(tabfile, types.ModuleType): + lextab = tabfile + else: + exec('import %s' % tabfile) + lextab = sys.modules[tabfile] + + if getattr(lextab, '_tabversion', '0.0') != __tabversion__: + raise ImportError('Inconsistent PLY version') + + self.lextokens = lextab._lextokens + self.lexreflags = lextab._lexreflags + self.lexliterals = lextab._lexliterals + self.lextokens_all = self.lextokens | set(self.lexliterals) + self.lexstateinfo = lextab._lexstateinfo + self.lexstateignore = lextab._lexstateignore + self.lexstatere = {} + self.lexstateretext = {} + for statename, lre in lextab._lexstatere.items(): + titem = [] + txtitem = [] + for pat, func_name in lre: + titem.append((re.compile(pat, lextab._lexreflags), _names_to_funcs(func_name, fdict))) + + self.lexstatere[statename] = titem + self.lexstateretext[statename] = txtitem + + self.lexstateerrorf = {} + for statename, ef in lextab._lexstateerrorf.items(): + self.lexstateerrorf[statename] = fdict[ef] + + self.lexstateeoff = {} + for statename, ef in lextab._lexstateeoff.items(): + self.lexstateeoff[statename] = fdict[ef] + + self.begin('INITIAL') + + # ------------------------------------------------------------ + # input() - Push a new string into the lexer + # ------------------------------------------------------------ + def input(self, s): + # Pull off the first character to see if s looks like a string + c = s[:1] + if not isinstance(c, StringTypes): + raise ValueError('Expected a string') + self.lexdata = s + self.lexpos = 0 + self.lexlen = len(s) + + # ------------------------------------------------------------ + # begin() - Changes the lexing state + # ------------------------------------------------------------ + def begin(self, state): + if state not in self.lexstatere: + raise ValueError('Undefined state') + self.lexre = self.lexstatere[state] + self.lexretext = self.lexstateretext[state] + self.lexignore = self.lexstateignore.get(state, '') + self.lexerrorf = self.lexstateerrorf.get(state, None) + self.lexeoff = self.lexstateeoff.get(state, None) + self.lexstate = state + + # ------------------------------------------------------------ + # push_state() - 
Changes the lexing state and saves old on stack + # ------------------------------------------------------------ + def push_state(self, state): + self.lexstatestack.append(self.lexstate) + self.begin(state) + + # ------------------------------------------------------------ + # pop_state() - Restores the previous state + # ------------------------------------------------------------ + def pop_state(self): + self.begin(self.lexstatestack.pop()) + + # ------------------------------------------------------------ + # current_state() - Returns the current lexing state + # ------------------------------------------------------------ + def current_state(self): + return self.lexstate + + # ------------------------------------------------------------ + # skip() - Skip ahead n characters + # ------------------------------------------------------------ + def skip(self, n): + self.lexpos += n + + # ------------------------------------------------------------ + # opttoken() - Return the next token from the Lexer + # + # Note: This function has been carefully implemented to be as fast + # as possible. Don't make changes unless you really know what + # you are doing + # ------------------------------------------------------------ + def token(self): + # Make local copies of frequently referenced attributes + lexpos = self.lexpos + lexlen = self.lexlen + lexignore = self.lexignore + lexdata = self.lexdata + + while lexpos < lexlen: + # This code provides some short-circuit code for whitespace, tabs, and other ignored characters + if lexdata[lexpos] in lexignore: + lexpos += 1 + continue + + # Look for a regular expression match + for lexre, lexindexfunc in self.lexre: + m = lexre.match(lexdata, lexpos) + if not m: + continue + + # Create a token for return + tok = LexToken() + tok.value = m.group() + tok.lineno = self.lineno + tok.lexpos = lexpos + + i = m.lastindex + func, tok.type = lexindexfunc[i] + + if not func: + # If no token type was set, it's an ignored token + if tok.type: + self.lexpos = m.end() + return tok + else: + lexpos = m.end() + break + + lexpos = m.end() + + # If token is processed by a function, call it + + tok.lexer = self # Set additional attributes useful in token rules + self.lexmatch = m + self.lexpos = lexpos + + newtok = func(tok) + + # Every function must return a token, if nothing, we just move to next token + if not newtok: + lexpos = self.lexpos # This is here in case user has updated lexpos. + lexignore = self.lexignore # This is here in case there was a state change + break + + # Verify type of the token. If not in the token map, raise an error + if not self.lexoptimize: + if newtok.type not in self.lextokens_all: + raise LexError("%s:%d: Rule '%s' returned an unknown token type '%s'" % ( + func.__code__.co_filename, func.__code__.co_firstlineno, + func.__name__, newtok.type), lexdata[lexpos:]) + + return newtok + else: + # No match, see if in literals + if lexdata[lexpos] in self.lexliterals: + tok = LexToken() + tok.value = lexdata[lexpos] + tok.lineno = self.lineno + tok.type = tok.value + tok.lexpos = lexpos + self.lexpos = lexpos + 1 + return tok + + # No match. Call t_error() if defined. + if self.lexerrorf: + tok = LexToken() + tok.value = self.lexdata[lexpos:] + tok.lineno = self.lineno + tok.type = 'error' + tok.lexer = self + tok.lexpos = lexpos + self.lexpos = lexpos + newtok = self.lexerrorf(tok) + if lexpos == self.lexpos: + # Error method didn't change text position at all. This is an error. + raise LexError("Scanning error. 
Illegal character '%s'" % (lexdata[lexpos]), lexdata[lexpos:]) + lexpos = self.lexpos + if not newtok: + continue + return newtok + + self.lexpos = lexpos + raise LexError("Illegal character '%s' at index %d" % (lexdata[lexpos], lexpos), lexdata[lexpos:]) + + if self.lexeoff: + tok = LexToken() + tok.type = 'eof' + tok.value = '' + tok.lineno = self.lineno + tok.lexpos = lexpos + tok.lexer = self + self.lexpos = lexpos + newtok = self.lexeoff(tok) + return newtok + + self.lexpos = lexpos + 1 + if self.lexdata is None: + raise RuntimeError('No input string given with input()') + return None + + # Iterator interface + def __iter__(self): + return self + + def next(self): + t = self.token() + if t is None: + raise StopIteration + return t + + __next__ = next + +# ----------------------------------------------------------------------------- +# ==== Lex Builder === +# +# The functions and classes below are used to collect lexing information +# and build a Lexer object from it. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# _get_regex(func) +# +# Returns the regular expression assigned to a function either as a doc string +# or as a .regex attribute attached by the @TOKEN decorator. +# ----------------------------------------------------------------------------- +def _get_regex(func): + return getattr(func, 'regex', func.__doc__) + +# ----------------------------------------------------------------------------- +# get_caller_module_dict() +# +# This function returns a dictionary containing all of the symbols defined within +# a caller further down the call stack. This is used to get the environment +# associated with the yacc() call if none was provided. +# ----------------------------------------------------------------------------- +def get_caller_module_dict(levels): + f = sys._getframe(levels) + ldict = f.f_globals.copy() + if f.f_globals != f.f_locals: + ldict.update(f.f_locals) + return ldict + +# ----------------------------------------------------------------------------- +# _funcs_to_names() +# +# Given a list of regular expression functions, this converts it to a list +# suitable for output to a table file +# ----------------------------------------------------------------------------- +def _funcs_to_names(funclist, namelist): + result = [] + for f, name in zip(funclist, namelist): + if f and f[0]: + result.append((name, f[1])) + else: + result.append(f) + return result + +# ----------------------------------------------------------------------------- +# _names_to_funcs() +# +# Given a list of regular expression function names, this converts it back to +# functions. +# ----------------------------------------------------------------------------- +def _names_to_funcs(namelist, fdict): + result = [] + for n in namelist: + if n and n[0]: + result.append((fdict[n[0]], n[1])) + else: + result.append(n) + return result + +# ----------------------------------------------------------------------------- +# _form_master_re() +# +# This function takes a list of all of the regex components and attempts to +# form the master regular expression. Given limitations in the Python re +# module, it may be necessary to break the master regex into separate expressions. 
+# ----------------------------------------------------------------------------- +def _form_master_re(relist, reflags, ldict, toknames): + if not relist: + return [] + regex = '|'.join(relist) + try: + lexre = re.compile(regex, reflags) + + # Build the index to function map for the matching engine + lexindexfunc = [None] * (max(lexre.groupindex.values()) + 1) + lexindexnames = lexindexfunc[:] + + for f, i in lexre.groupindex.items(): + handle = ldict.get(f, None) + if type(handle) in (types.FunctionType, types.MethodType): + lexindexfunc[i] = (handle, toknames[f]) + lexindexnames[i] = f + elif handle is not None: + lexindexnames[i] = f + if f.find('ignore_') > 0: + lexindexfunc[i] = (None, None) + else: + lexindexfunc[i] = (None, toknames[f]) + + return [(lexre, lexindexfunc)], [regex], [lexindexnames] + except Exception: + m = int(len(relist)/2) + if m == 0: + m = 1 + llist, lre, lnames = _form_master_re(relist[:m], reflags, ldict, toknames) + rlist, rre, rnames = _form_master_re(relist[m:], reflags, ldict, toknames) + return (llist+rlist), (lre+rre), (lnames+rnames) + +# ----------------------------------------------------------------------------- +# def _statetoken(s,names) +# +# Given a declaration name s of the form "t_" and a dictionary whose keys are +# state names, this function returns a tuple (states,tokenname) where states +# is a tuple of state names and tokenname is the name of the token. For example, +# calling this with s = "t_foo_bar_SPAM" might return (('foo','bar'),'SPAM') +# ----------------------------------------------------------------------------- +def _statetoken(s, names): + nonstate = 1 + parts = s.split('_') + for i, part in enumerate(parts[1:], 1): + if part not in names and part != 'ANY': + break + + if i > 1: + states = tuple(parts[1:i]) + else: + states = ('INITIAL',) + + if 'ANY' in states: + states = tuple(names) + + tokenname = '_'.join(parts[i:]) + return (states, tokenname) + + +# ----------------------------------------------------------------------------- +# LexerReflect() +# +# This class represents information needed to build a lexer as extracted from a +# user's input file. 
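For reference, this is the shape of the "user's input file" that LexerReflect scans: a module whose tokens list, t_-prefixed strings, and t_-prefixed functions are harvested by name. A minimal self-contained sketch, assuming a standalone ply install (the copy added by this diff lives under pycparser.ply):

import ply.lex as lex

tokens = ('NUMBER', 'PLUS')   # the token map get_tokens() collects

t_PLUS = r'\+'                # simple rules are plain strings
t_ignore = ' \t'              # characters skipped by the engine

def t_NUMBER(t):              # function rules carry code; the regex is the docstring
    r'\d+'
    t.value = int(t.value)
    return t

def t_error(t):               # error rule, collected into errorf
    print("Illegal character %r" % t.value[0])
    t.lexer.skip(1)

lexer = lex.lex()             # lex() reflects over this module's namespace
lexer.input('1 + 22')
for tok in lexer:
    print(tok.type, tok.value)  # NUMBER 1, PLUS '+', NUMBER 22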
+# ----------------------------------------------------------------------------- +class LexerReflect(object): + def __init__(self, ldict, log=None, reflags=0): + self.ldict = ldict + self.error_func = None + self.tokens = [] + self.reflags = reflags + self.stateinfo = {'INITIAL': 'inclusive'} + self.modules = set() + self.error = False + self.log = PlyLogger(sys.stderr) if log is None else log + + # Get all of the basic information + def get_all(self): + self.get_tokens() + self.get_literals() + self.get_states() + self.get_rules() + + # Validate all of the information + def validate_all(self): + self.validate_tokens() + self.validate_literals() + self.validate_rules() + return self.error + + # Get the tokens map + def get_tokens(self): + tokens = self.ldict.get('tokens', None) + if not tokens: + self.log.error('No token list is defined') + self.error = True + return + + if not isinstance(tokens, (list, tuple)): + self.log.error('tokens must be a list or tuple') + self.error = True + return + + if not tokens: + self.log.error('tokens is empty') + self.error = True + return + + self.tokens = tokens + + # Validate the tokens + def validate_tokens(self): + terminals = {} + for n in self.tokens: + if not _is_identifier.match(n): + self.log.error("Bad token name '%s'", n) + self.error = True + if n in terminals: + self.log.warning("Token '%s' multiply defined", n) + terminals[n] = 1 + + # Get the literals specifier + def get_literals(self): + self.literals = self.ldict.get('literals', '') + if not self.literals: + self.literals = '' + + # Validate literals + def validate_literals(self): + try: + for c in self.literals: + if not isinstance(c, StringTypes) or len(c) > 1: + self.log.error('Invalid literal %s. Must be a single character', repr(c)) + self.error = True + + except TypeError: + self.log.error('Invalid literals specification. literals must be a sequence of characters') + self.error = True + + def get_states(self): + self.states = self.ldict.get('states', None) + # Build statemap + if self.states: + if not isinstance(self.states, (tuple, list)): + self.log.error('states must be defined as a tuple or list') + self.error = True + else: + for s in self.states: + if not isinstance(s, tuple) or len(s) != 2: + self.log.error("Invalid state specifier %s. 
Must be a tuple (statename,'exclusive|inclusive')", repr(s)) + self.error = True + continue + name, statetype = s + if not isinstance(name, StringTypes): + self.log.error('State name %s must be a string', repr(name)) + self.error = True + continue + if not (statetype == 'inclusive' or statetype == 'exclusive'): + self.log.error("State type for state %s must be 'inclusive' or 'exclusive'", name) + self.error = True + continue + if name in self.stateinfo: + self.log.error("State '%s' already defined", name) + self.error = True + continue + self.stateinfo[name] = statetype + + # Get all of the symbols with a t_ prefix and sort them into various + # categories (functions, strings, error functions, and ignore characters) + + def get_rules(self): + tsymbols = [f for f in self.ldict if f[:2] == 't_'] + + # Now build up a list of functions and a list of strings + self.toknames = {} # Mapping of symbols to token names + self.funcsym = {} # Symbols defined as functions + self.strsym = {} # Symbols defined as strings + self.ignore = {} # Ignore strings by state + self.errorf = {} # Error functions by state + self.eoff = {} # EOF functions by state + + for s in self.stateinfo: + self.funcsym[s] = [] + self.strsym[s] = [] + + if len(tsymbols) == 0: + self.log.error('No rules of the form t_rulename are defined') + self.error = True + return + + for f in tsymbols: + t = self.ldict[f] + states, tokname = _statetoken(f, self.stateinfo) + self.toknames[f] = tokname + + if hasattr(t, '__call__'): + if tokname == 'error': + for s in states: + self.errorf[s] = t + elif tokname == 'eof': + for s in states: + self.eoff[s] = t + elif tokname == 'ignore': + line = t.__code__.co_firstlineno + file = t.__code__.co_filename + self.log.error("%s:%d: Rule '%s' must be defined as a string", file, line, t.__name__) + self.error = True + else: + for s in states: + self.funcsym[s].append((f, t)) + elif isinstance(t, StringTypes): + if tokname == 'ignore': + for s in states: + self.ignore[s] = t + if '\\' in t: + self.log.warning("%s contains a literal backslash '\\'", f) + + elif tokname == 'error': + self.log.error("Rule '%s' must be defined as a function", f) + self.error = True + else: + for s in states: + self.strsym[s].append((f, t)) + else: + self.log.error('%s not defined as a function or string', f) + self.error = True + + # Sort the functions by line number + for f in self.funcsym.values(): + f.sort(key=lambda x: x[1].__code__.co_firstlineno) + + # Sort the strings by regular expression length + for s in self.strsym.values(): + s.sort(key=lambda x: len(x[1]), reverse=True) + + # Validate all of the t_rules collected + def validate_rules(self): + for state in self.stateinfo: + # Validate all rules defined by functions + + for fname, f in self.funcsym[state]: + line = f.__code__.co_firstlineno + file = f.__code__.co_filename + module = inspect.getmodule(f) + self.modules.add(module) + + tokname = self.toknames[fname] + if isinstance(f, types.MethodType): + reqargs = 2 + else: + reqargs = 1 + nargs = f.__code__.co_argcount + if nargs > reqargs: + self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__) + self.error = True + continue + + if nargs < reqargs: + self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__) + self.error = True + continue + + if not _get_regex(f): + self.log.error("%s:%d: No regular expression defined for rule '%s'", file, line, f.__name__) + self.error = True + continue + + try: + c = re.compile('(?P<%s>%s)' % (fname, _get_regex(f)), 
self.reflags) + if c.match(''): + self.log.error("%s:%d: Regular expression for rule '%s' matches empty string", file, line, f.__name__) + self.error = True + except re.error as e: + self.log.error("%s:%d: Invalid regular expression for rule '%s'. %s", file, line, f.__name__, e) + if '#' in _get_regex(f): + self.log.error("%s:%d. Make sure '#' in rule '%s' is escaped with '\\#'", file, line, f.__name__) + self.error = True + + # Validate all rules defined by strings + for name, r in self.strsym[state]: + tokname = self.toknames[name] + if tokname == 'error': + self.log.error("Rule '%s' must be defined as a function", name) + self.error = True + continue + + if tokname not in self.tokens and tokname.find('ignore_') < 0: + self.log.error("Rule '%s' defined for an unspecified token %s", name, tokname) + self.error = True + continue + + try: + c = re.compile('(?P<%s>%s)' % (name, r), self.reflags) + if (c.match('')): + self.log.error("Regular expression for rule '%s' matches empty string", name) + self.error = True + except re.error as e: + self.log.error("Invalid regular expression for rule '%s'. %s", name, e) + if '#' in r: + self.log.error("Make sure '#' in rule '%s' is escaped with '\\#'", name) + self.error = True + + if not self.funcsym[state] and not self.strsym[state]: + self.log.error("No rules defined for state '%s'", state) + self.error = True + + # Validate the error function + efunc = self.errorf.get(state, None) + if efunc: + f = efunc + line = f.__code__.co_firstlineno + file = f.__code__.co_filename + module = inspect.getmodule(f) + self.modules.add(module) + + if isinstance(f, types.MethodType): + reqargs = 2 + else: + reqargs = 1 + nargs = f.__code__.co_argcount + if nargs > reqargs: + self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__) + self.error = True + + if nargs < reqargs: + self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__) + self.error = True + + for module in self.modules: + self.validate_module(module) + + # ----------------------------------------------------------------------------- + # validate_module() + # + # This checks to see if there are duplicated t_rulename() functions or strings + # in the parser input file. This is done using a simple regular expression + # match on each line in the source code of the given module. + # ----------------------------------------------------------------------------- + + def validate_module(self, module): + try: + lines, linen = inspect.getsourcelines(module) + except IOError: + return + + fre = re.compile(r'\s*def\s+(t_[a-zA-Z_0-9]*)\(') + sre = re.compile(r'\s*(t_[a-zA-Z_0-9]*)\s*=') + + counthash = {} + linen += 1 + for line in lines: + m = fre.match(line) + if not m: + m = sre.match(line) + if m: + name = m.group(1) + prev = counthash.get(name) + if not prev: + counthash[name] = linen + else: + filename = inspect.getsourcefile(module) + self.log.error('%s:%d: Rule %s redefined. 
Previously defined on line %d', filename, linen, name, prev) + self.error = True + linen += 1 + +# ----------------------------------------------------------------------------- +# lex(module) +# +# Build all of the regular expression rules from definitions in the supplied module +# ----------------------------------------------------------------------------- +def lex(module=None, object=None, debug=False, optimize=False, lextab='lextab', + reflags=int(re.VERBOSE), nowarn=False, outputdir=None, debuglog=None, errorlog=None): + + if lextab is None: + lextab = 'lextab' + + global lexer + + ldict = None + stateinfo = {'INITIAL': 'inclusive'} + lexobj = Lexer() + lexobj.lexoptimize = optimize + global token, input + + if errorlog is None: + errorlog = PlyLogger(sys.stderr) + + if debug: + if debuglog is None: + debuglog = PlyLogger(sys.stderr) + + # Get the module dictionary used for the lexer + if object: + module = object + + # Get the module dictionary used for the parser + if module: + _items = [(k, getattr(module, k)) for k in dir(module)] + ldict = dict(_items) + # If no __file__ attribute is available, try to obtain it from the __module__ instead + if '__file__' not in ldict: + ldict['__file__'] = sys.modules[ldict['__module__']].__file__ + else: + ldict = get_caller_module_dict(2) + + # Determine if the module is package of a package or not. + # If so, fix the tabmodule setting so that tables load correctly + pkg = ldict.get('__package__') + if pkg and isinstance(lextab, str): + if '.' not in lextab: + lextab = pkg + '.' + lextab + + # Collect parser information from the dictionary + linfo = LexerReflect(ldict, log=errorlog, reflags=reflags) + linfo.get_all() + if not optimize: + if linfo.validate_all(): + raise SyntaxError("Can't build lexer") + + if optimize and lextab: + try: + lexobj.readtab(lextab, ldict) + token = lexobj.token + input = lexobj.input + lexer = lexobj + return lexobj + + except ImportError: + pass + + # Dump some basic debugging information + if debug: + debuglog.info('lex: tokens = %r', linfo.tokens) + debuglog.info('lex: literals = %r', linfo.literals) + debuglog.info('lex: states = %r', linfo.stateinfo) + + # Build a dictionary of valid token names + lexobj.lextokens = set() + for n in linfo.tokens: + lexobj.lextokens.add(n) + + # Get literals specification + if isinstance(linfo.literals, (list, tuple)): + lexobj.lexliterals = type(linfo.literals[0])().join(linfo.literals) + else: + lexobj.lexliterals = linfo.literals + + lexobj.lextokens_all = lexobj.lextokens | set(lexobj.lexliterals) + + # Get the stateinfo dictionary + stateinfo = linfo.stateinfo + + regexs = {} + # Build the master regular expressions + for state in stateinfo: + regex_list = [] + + # Add rules defined by functions first + for fname, f in linfo.funcsym[state]: + line = f.__code__.co_firstlineno + file = f.__code__.co_filename + regex_list.append('(?P<%s>%s)' % (fname, _get_regex(f))) + if debug: + debuglog.info("lex: Adding rule %s -> '%s' (state '%s')", fname, _get_regex(f), state) + + # Now add all of the simple rules + for name, r in linfo.strsym[state]: + regex_list.append('(?P<%s>%s)' % (name, r)) + if debug: + debuglog.info("lex: Adding rule %s -> '%s' (state '%s')", name, r, state) + + regexs[state] = regex_list + + # Build the master regular expressions + + if debug: + debuglog.info('lex: ==== MASTER REGEXS FOLLOW ====') + + for state in regexs: + lexre, re_text, re_names = _form_master_re(regexs[state], reflags, ldict, linfo.toknames) + lexobj.lexstatere[state] = lexre + 
lexobj.lexstateretext[state] = re_text + lexobj.lexstaterenames[state] = re_names + if debug: + for i, text in enumerate(re_text): + debuglog.info("lex: state '%s' : regex[%d] = '%s'", state, i, text) + + # For inclusive states, we need to add the regular expressions from the INITIAL state + for state, stype in stateinfo.items(): + if state != 'INITIAL' and stype == 'inclusive': + lexobj.lexstatere[state].extend(lexobj.lexstatere['INITIAL']) + lexobj.lexstateretext[state].extend(lexobj.lexstateretext['INITIAL']) + lexobj.lexstaterenames[state].extend(lexobj.lexstaterenames['INITIAL']) + + lexobj.lexstateinfo = stateinfo + lexobj.lexre = lexobj.lexstatere['INITIAL'] + lexobj.lexretext = lexobj.lexstateretext['INITIAL'] + lexobj.lexreflags = reflags + + # Set up ignore variables + lexobj.lexstateignore = linfo.ignore + lexobj.lexignore = lexobj.lexstateignore.get('INITIAL', '') + + # Set up error functions + lexobj.lexstateerrorf = linfo.errorf + lexobj.lexerrorf = linfo.errorf.get('INITIAL', None) + if not lexobj.lexerrorf: + errorlog.warning('No t_error rule is defined') + + # Set up eof functions + lexobj.lexstateeoff = linfo.eoff + lexobj.lexeoff = linfo.eoff.get('INITIAL', None) + + # Check state information for ignore and error rules + for s, stype in stateinfo.items(): + if stype == 'exclusive': + if s not in linfo.errorf: + errorlog.warning("No error rule is defined for exclusive state '%s'", s) + if s not in linfo.ignore and lexobj.lexignore: + errorlog.warning("No ignore rule is defined for exclusive state '%s'", s) + elif stype == 'inclusive': + if s not in linfo.errorf: + linfo.errorf[s] = linfo.errorf.get('INITIAL', None) + if s not in linfo.ignore: + linfo.ignore[s] = linfo.ignore.get('INITIAL', '') + + # Create global versions of the token() and input() functions + token = lexobj.token + input = lexobj.input + lexer = lexobj + + # If in optimize mode, we write the lextab + if lextab and optimize: + if outputdir is None: + # If no output directory is set, the location of the output files + # is determined according to the following rules: + # - If lextab specifies a package, files go into that package directory + # - Otherwise, files go in the same directory as the specifying module + if isinstance(lextab, types.ModuleType): + srcfile = lextab.__file__ + else: + if '.' not in lextab: + srcfile = ldict['__file__'] + else: + parts = lextab.split('.') + pkgname = '.'.join(parts[:-1]) + exec('import %s' % pkgname) + srcfile = getattr(sys.modules[pkgname], '__file__', '') + outputdir = os.path.dirname(srcfile) + try: + lexobj.writetab(lextab, outputdir) + except IOError as e: + errorlog.warning("Couldn't write lextab module %r. 
%s" % (lextab, e)) + + return lexobj + +# ----------------------------------------------------------------------------- +# runmain() +# +# This runs the lexer as a main program +# ----------------------------------------------------------------------------- + +def runmain(lexer=None, data=None): + if not data: + try: + filename = sys.argv[1] + f = open(filename) + data = f.read() + f.close() + except IndexError: + sys.stdout.write('Reading from standard input (type EOF to end):\n') + data = sys.stdin.read() + + if lexer: + _input = lexer.input + else: + _input = input + _input(data) + if lexer: + _token = lexer.token + else: + _token = token + + while True: + tok = _token() + if not tok: + break + sys.stdout.write('(%s,%r,%d,%d)\n' % (tok.type, tok.value, tok.lineno, tok.lexpos)) + +# ----------------------------------------------------------------------------- +# @TOKEN(regex) +# +# This decorator function can be used to set the regex expression on a function +# when its docstring might need to be set in an alternative way +# ----------------------------------------------------------------------------- + +def TOKEN(r): + def set_regex(f): + if hasattr(r, '__call__'): + f.regex = _get_regex(r) + else: + f.regex = r + return f + return set_regex + +# Alternative spelling of the TOKEN decorator +Token = TOKEN diff --git a/venv/lib/python3.12/site-packages/pycparser/ply/yacc.py b/venv/lib/python3.12/site-packages/pycparser/ply/yacc.py new file mode 100644 index 00000000..20b4f286 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pycparser/ply/yacc.py @@ -0,0 +1,3494 @@ +# ----------------------------------------------------------------------------- +# ply: yacc.py +# +# Copyright (C) 2001-2017 +# David M. Beazley (Dabeaz LLC) +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# * Neither the name of the David Beazley or Dabeaz LLC may be used to +# endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# ----------------------------------------------------------------------------- +# +# This implements an LR parser that is constructed from grammar rules defined +# as Python functions. The grammer is specified by supplying the BNF inside +# Python documentation strings. 
The inspiration for this technique was borrowed +# from John Aycock's Spark parsing system. PLY might be viewed as cross between +# Spark and the GNU bison utility. +# +# The current implementation is only somewhat object-oriented. The +# LR parser itself is defined in terms of an object (which allows multiple +# parsers to co-exist). However, most of the variables used during table +# construction are defined in terms of global variables. Users shouldn't +# notice unless they are trying to define multiple parsers at the same +# time using threads (in which case they should have their head examined). +# +# This implementation supports both SLR and LALR(1) parsing. LALR(1) +# support was originally implemented by Elias Ioup (ezioup@alumni.uchicago.edu), +# using the algorithm found in Aho, Sethi, and Ullman "Compilers: Principles, +# Techniques, and Tools" (The Dragon Book). LALR(1) has since been replaced +# by the more efficient DeRemer and Pennello algorithm. +# +# :::::::: WARNING ::::::: +# +# Construction of LR parsing tables is fairly complicated and expensive. +# To make this module run fast, a *LOT* of work has been put into +# optimization---often at the expensive of readability and what might +# consider to be good Python "coding style." Modify the code at your +# own risk! +# ---------------------------------------------------------------------------- + +import re +import types +import sys +import os.path +import inspect +import base64 +import warnings + +__version__ = '3.10' +__tabversion__ = '3.10' + +#----------------------------------------------------------------------------- +# === User configurable parameters === +# +# Change these to modify the default behavior of yacc (if you wish) +#----------------------------------------------------------------------------- + +yaccdebug = True # Debugging mode. If set, yacc generates a + # a 'parser.out' file in the current directory + +debug_file = 'parser.out' # Default name of the debugging file +tab_module = 'parsetab' # Default name of the table module +default_lr = 'LALR' # Default LR table generation method + +error_count = 3 # Number of symbols that must be shifted to leave recovery mode + +yaccdevel = False # Set to True if developing yacc. This turns off optimized + # implementations of certain functions. + +resultlimit = 40 # Size limit of results when running in debug mode. + +pickle_protocol = 0 # Protocol to use when writing pickle files + +# String type-checking compatibility +if sys.version_info[0] < 3: + string_types = basestring +else: + string_types = str + +MAXINT = sys.maxsize + +# This object is a stand-in for a logging object created by the +# logging module. PLY will use this by default to create things +# such as the parser.out file. If a user wants more detailed +# information, they can create their own logging object and pass +# it into PLY. + +class PlyLogger(object): + def __init__(self, f): + self.f = f + + def debug(self, msg, *args, **kwargs): + self.f.write((msg % args) + '\n') + + info = debug + + def warning(self, msg, *args, **kwargs): + self.f.write('WARNING: ' + (msg % args) + '\n') + + def error(self, msg, *args, **kwargs): + self.f.write('ERROR: ' + (msg % args) + '\n') + + critical = debug + +# Null logger is used when no output is generated. Does nothing. 
+class NullLogger(object): + def __getattribute__(self, name): + return self + + def __call__(self, *args, **kwargs): + return self + +# Exception raised for yacc-related errors +class YaccError(Exception): + pass + +# Format the result message that the parser produces when running in debug mode. +def format_result(r): + repr_str = repr(r) + if '\n' in repr_str: + repr_str = repr(repr_str) + if len(repr_str) > resultlimit: + repr_str = repr_str[:resultlimit] + ' ...' + result = '<%s @ 0x%x> (%s)' % (type(r).__name__, id(r), repr_str) + return result + +# Format stack entries when the parser is running in debug mode +def format_stack_entry(r): + repr_str = repr(r) + if '\n' in repr_str: + repr_str = repr(repr_str) + if len(repr_str) < 16: + return repr_str + else: + return '<%s @ 0x%x>' % (type(r).__name__, id(r)) + +# Panic mode error recovery support. This feature is being reworked--much of the +# code here is to offer a deprecation/backwards compatible transition + +_errok = None +_token = None +_restart = None +_warnmsg = '''PLY: Don't use global functions errok(), token(), and restart() in p_error(). +Instead, invoke the methods on the associated parser instance: + + def p_error(p): + ... + # Use parser.errok(), parser.token(), parser.restart() + ... + + parser = yacc.yacc() +''' + +def errok(): + warnings.warn(_warnmsg) + return _errok() + +def restart(): + warnings.warn(_warnmsg) + return _restart() + +def token(): + warnings.warn(_warnmsg) + return _token() + +# Utility function to call the p_error() function with some deprecation hacks +def call_errorfunc(errorfunc, token, parser): + global _errok, _token, _restart + _errok = parser.errok + _token = parser.token + _restart = parser.restart + r = errorfunc(token) + try: + del _errok, _token, _restart + except NameError: + pass + return r + +#----------------------------------------------------------------------------- +# === LR Parsing Engine === +# +# The following classes are used for the LR parser itself. These are not +# used during table construction and are independent of the actual LR +# table generation algorithm +#----------------------------------------------------------------------------- + +# This class is used to hold non-terminal grammar symbols during parsing. +# It normally has the following attributes set: +# .type = Grammar symbol type +# .value = Symbol value +# .lineno = Starting line number +# .endlineno = Ending line number (optional, set automatically) +# .lexpos = Starting lex position +# .endlexpos = Ending lex position (optional, set automatically) + +class YaccSymbol: + def __str__(self): + return self.type + + def __repr__(self): + return str(self) + +# This class is a wrapper around the objects actually passed to each +# grammar rule. Index lookup and assignment actually assign the +# .value attribute of the underlying YaccSymbol object. +# The lineno() method returns the line number of a given +# item (or 0 if not defined). The linespan() method returns +# a tuple of (startline,endline) representing the range of lines +# for a symbol. The lexspan() method returns a tuple (lexpos,endlexpos) +# representing the range of positional information for a symbol. 
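A toy grammar makes the contract just described concrete: subscripting a YaccProduction reads and writes the .value of the underlying symbols, while helpers such as lineno() and lexspan() expose the position attributes listed above. A hypothetical example, again assuming a standalone ply install:

import ply.lex as lex
import ply.yacc as yacc

tokens = ('NUMBER', 'PLUS')
t_PLUS = r'\+'
t_ignore = ' '

def t_NUMBER(t):
    r'\d+'
    t.value = int(t.value)
    return t

def t_error(t):
    t.lexer.skip(1)

def p_expr_plus(p):
    'expr : expr PLUS NUMBER'
    p[0] = p[1] + p[3]        # assignment goes to the result symbol's .value
    # p.lineno(3) and p.lexspan(3) would expose NUMBER's position info here

def p_expr_number(p):
    'expr : NUMBER'
    p[0] = p[1]

def p_error(p):
    print('Syntax error at', p)

lexer = lex.lex()
parser = yacc.yacc(errorlog=yacc.NullLogger())  # NullLogger silences table diagnostics
print(parser.parse('1 + 2 + 3'))                # -> 6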
+ +class YaccProduction: + def __init__(self, s, stack=None): + self.slice = s + self.stack = stack + self.lexer = None + self.parser = None + + def __getitem__(self, n): + if isinstance(n, slice): + return [s.value for s in self.slice[n]] + elif n >= 0: + return self.slice[n].value + else: + return self.stack[n].value + + def __setitem__(self, n, v): + self.slice[n].value = v + + def __getslice__(self, i, j): + return [s.value for s in self.slice[i:j]] + + def __len__(self): + return len(self.slice) + + def lineno(self, n): + return getattr(self.slice[n], 'lineno', 0) + + def set_lineno(self, n, lineno): + self.slice[n].lineno = lineno + + def linespan(self, n): + startline = getattr(self.slice[n], 'lineno', 0) + endline = getattr(self.slice[n], 'endlineno', startline) + return startline, endline + + def lexpos(self, n): + return getattr(self.slice[n], 'lexpos', 0) + + def lexspan(self, n): + startpos = getattr(self.slice[n], 'lexpos', 0) + endpos = getattr(self.slice[n], 'endlexpos', startpos) + return startpos, endpos + + def error(self): + raise SyntaxError + +# ----------------------------------------------------------------------------- +# == LRParser == +# +# The LR Parsing engine. +# ----------------------------------------------------------------------------- + +class LRParser: + def __init__(self, lrtab, errorf): + self.productions = lrtab.lr_productions + self.action = lrtab.lr_action + self.goto = lrtab.lr_goto + self.errorfunc = errorf + self.set_defaulted_states() + self.errorok = True + + def errok(self): + self.errorok = True + + def restart(self): + del self.statestack[:] + del self.symstack[:] + sym = YaccSymbol() + sym.type = '$end' + self.symstack.append(sym) + self.statestack.append(0) + + # Defaulted state support. + # This method identifies parser states where there is only one possible reduction action. + # For such states, the parser can make a choose to make a rule reduction without consuming + # the next look-ahead token. This delayed invocation of the tokenizer can be useful in + # certain kinds of advanced parsing situations where the lexer and parser interact with + # each other or change states (i.e., manipulation of scope, lexer states, etc.). + # + # See: https://www.gnu.org/software/bison/manual/html_node/Default-Reductions.html#Default-Reductions + def set_defaulted_states(self): + self.defaulted_states = {} + for state, actions in self.action.items(): + rules = list(actions.values()) + if len(rules) == 1 and rules[0] < 0: + self.defaulted_states[state] = rules[0] + + def disable_defaulted_states(self): + self.defaulted_states = {} + + def parse(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None): + if debug or yaccdevel: + if isinstance(debug, int): + debug = PlyLogger(sys.stderr) + return self.parsedebug(input, lexer, debug, tracking, tokenfunc) + elif tracking: + return self.parseopt(input, lexer, debug, tracking, tokenfunc) + else: + return self.parseopt_notrack(input, lexer, debug, tracking, tokenfunc) + + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # parsedebug(). + # + # This is the debugging enabled version of parse(). All changes made to the + # parsing engine should be made here. Optimized versions of this function + # are automatically created by the ply/ygen.py script. This script cuts out + # sections enclosed in markers such as this: + # + # #--! DEBUG + # statements + # #--! DEBUG + # + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
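The parse() method shown above dispatches among the three engine variants (parsedebug, parseopt, parseopt_notrack): a true debug argument selects the parsedebug() path that follows, a plain integer is wrapped in PlyLogger(sys.stderr), and any object with debug/info/error methods, such as a logging.Logger, is used directly as the trace sink. A short sketch, with the caveat that it reuses the parser object from the previous example rather than standing alone:

import logging
logging.basicConfig(level=logging.DEBUG, filename='parser.log', filemode='w')
result = parser.parse('1 + 2', debug=logging.getLogger())  # shift/reduce trace lands in parser.log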
+ + def parsedebug(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None): + #--! parsedebug-start + lookahead = None # Current lookahead symbol + lookaheadstack = [] # Stack of lookahead symbols + actions = self.action # Local reference to action table (to avoid lookup on self.) + goto = self.goto # Local reference to goto table (to avoid lookup on self.) + prod = self.productions # Local reference to production list (to avoid lookup on self.) + defaulted_states = self.defaulted_states # Local reference to defaulted states + pslice = YaccProduction(None) # Production object passed to grammar rules + errorcount = 0 # Used during error recovery + + #--! DEBUG + debug.info('PLY: PARSE DEBUG START') + #--! DEBUG + + # If no lexer was given, we will try to use the lex module + if not lexer: + from . import lex + lexer = lex.lexer + + # Set up the lexer and parser objects on pslice + pslice.lexer = lexer + pslice.parser = self + + # If input was supplied, pass to lexer + if input is not None: + lexer.input(input) + + if tokenfunc is None: + # Tokenize function + get_token = lexer.token + else: + get_token = tokenfunc + + # Set the parser() token method (sometimes used in error recovery) + self.token = get_token + + # Set up the state and symbol stacks + + statestack = [] # Stack of parsing states + self.statestack = statestack + symstack = [] # Stack of grammar symbols + self.symstack = symstack + + pslice.stack = symstack # Put in the production + errtoken = None # Err token + + # The start state is assumed to be (0,$end) + + statestack.append(0) + sym = YaccSymbol() + sym.type = '$end' + symstack.append(sym) + state = 0 + while True: + # Get the next symbol on the input. If a lookahead symbol + # is already set, we just use that. Otherwise, we'll pull + # the next token off of the lookaheadstack or from the lexer + + #--! DEBUG + debug.debug('') + debug.debug('State : %s', state) + #--! DEBUG + + if state not in defaulted_states: + if not lookahead: + if not lookaheadstack: + lookahead = get_token() # Get the next token + else: + lookahead = lookaheadstack.pop() + if not lookahead: + lookahead = YaccSymbol() + lookahead.type = '$end' + + # Check the action table + ltype = lookahead.type + t = actions[state].get(ltype) + else: + t = defaulted_states[state] + #--! DEBUG + debug.debug('Defaulted state %s: Reduce using %d', state, -t) + #--! DEBUG + + #--! DEBUG + debug.debug('Stack : %s', + ('%s . %s' % (' '.join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip()) + #--! DEBUG + + if t is not None: + if t > 0: + # shift a symbol on the stack + statestack.append(t) + state = t + + #--! DEBUG + debug.debug('Action : Shift and goto state %s', t) + #--! DEBUG + + symstack.append(lookahead) + lookahead = None + + # Decrease error count on successful shift + if errorcount: + errorcount -= 1 + continue + + if t < 0: + # reduce a symbol on the stack, emit a production + p = prod[-t] + pname = p.name + plen = p.len + + # Get production function + sym = YaccSymbol() + sym.type = pname # Production name + sym.value = None + + #--! DEBUG + if plen: + debug.info('Action : Reduce rule [%s] with %s and goto state %d', p.str, + '['+','.join([format_stack_entry(_v.value) for _v in symstack[-plen:]])+']', + goto[statestack[-1-plen]][pname]) + else: + debug.info('Action : Reduce rule [%s] with %s and goto state %d', p.str, [], + goto[statestack[-1]][pname]) + + #--! DEBUG + + if plen: + targ = symstack[-plen-1:] + targ[0] = sym + + #--! 
TRACKING + if tracking: + t1 = targ[1] + sym.lineno = t1.lineno + sym.lexpos = t1.lexpos + t1 = targ[-1] + sym.endlineno = getattr(t1, 'endlineno', t1.lineno) + sym.endlexpos = getattr(t1, 'endlexpos', t1.lexpos) + #--! TRACKING + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # below as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + del symstack[-plen:] + self.state = state + p.callable(pslice) + del statestack[-plen:] + #--! DEBUG + debug.info('Result : %s', format_result(pslice[0])) + #--! DEBUG + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) # Save the current lookahead token + symstack.extend(targ[1:-1]) # Put the production slice back on the stack + statestack.pop() # Pop back one state (before the reduce) + state = statestack[-1] + sym.type = 'error' + sym.value = 'error' + lookahead = sym + errorcount = error_count + self.errorok = False + + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + else: + + #--! TRACKING + if tracking: + sym.lineno = lexer.lineno + sym.lexpos = lexer.lexpos + #--! TRACKING + + targ = [sym] + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # above as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + self.state = state + p.callable(pslice) + #--! DEBUG + debug.info('Result : %s', format_result(pslice[0])) + #--! DEBUG + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) # Save the current lookahead token + statestack.pop() # Pop back one state (before the reduce) + state = statestack[-1] + sym.type = 'error' + sym.value = 'error' + lookahead = sym + errorcount = error_count + self.errorok = False + + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + if t == 0: + n = symstack[-1] + result = getattr(n, 'value', None) + #--! DEBUG + debug.info('Done : Returning %s', format_result(result)) + debug.info('PLY: PARSE DEBUG END') + #--! DEBUG + return result + + if t is None: + + #--! DEBUG + debug.error('Error : %s', + ('%s . %s' % (' '.join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip()) + #--! DEBUG + + # We have some kind of parsing error here. To handle + # this, we are going to push the current token onto + # the tokenstack and replace it with an 'error' token. + # If there are any synchronization rules, they may + # catch it. + # + # In addition to pushing the error token, we call call + # the user defined p_error() function if this is the + # first syntax error. This function is only called if + # errorcount == 0. + if errorcount == 0 or self.errorok: + errorcount = error_count + self.errorok = False + errtoken = lookahead + if errtoken.type == '$end': + errtoken = None # End of file! + if self.errorfunc: + if errtoken and not hasattr(errtoken, 'lexer'): + errtoken.lexer = lexer + self.state = state + tok = call_errorfunc(self.errorfunc, errtoken, self) + if self.errorok: + # User must have done some kind of panic + # mode recovery on their own. 
The + # returned token is the next lookahead + lookahead = tok + errtoken = None + continue + else: + if errtoken: + if hasattr(errtoken, 'lineno'): + lineno = lookahead.lineno + else: + lineno = 0 + if lineno: + sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type)) + else: + sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type) + else: + sys.stderr.write('yacc: Parse error in input. EOF\n') + return + + else: + errorcount = error_count + + # case 1: the statestack only has 1 entry on it. If we're in this state, the + # entire parse has been rolled back and we're completely hosed. The token is + # discarded and we just keep going. + + if len(statestack) <= 1 and lookahead.type != '$end': + lookahead = None + errtoken = None + state = 0 + # Nuke the pushback stack + del lookaheadstack[:] + continue + + # case 2: the statestack has a couple of entries on it, but we're + # at the end of the file. nuke the top entry and generate an error token + + # Start nuking entries on the stack + if lookahead.type == '$end': + # Whoa. We're really hosed here. Bail out + return + + if lookahead.type != 'error': + sym = symstack[-1] + if sym.type == 'error': + # Hmmm. Error is on top of stack, we'll just nuke input + # symbol and continue + #--! TRACKING + if tracking: + sym.endlineno = getattr(lookahead, 'lineno', sym.lineno) + sym.endlexpos = getattr(lookahead, 'lexpos', sym.lexpos) + #--! TRACKING + lookahead = None + continue + + # Create the error symbol for the first time and make it the new lookahead symbol + t = YaccSymbol() + t.type = 'error' + + if hasattr(lookahead, 'lineno'): + t.lineno = t.endlineno = lookahead.lineno + if hasattr(lookahead, 'lexpos'): + t.lexpos = t.endlexpos = lookahead.lexpos + t.value = lookahead + lookaheadstack.append(lookahead) + lookahead = t + else: + sym = symstack.pop() + #--! TRACKING + if tracking: + lookahead.lineno = sym.lineno + lookahead.lexpos = sym.lexpos + #--! TRACKING + statestack.pop() + state = statestack[-1] + + continue + + # Call an error function here + raise RuntimeError('yacc: internal parser error!!!\n') + + #--! parsedebug-end + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # parseopt(). + # + # Optimized version of parse() method. DO NOT EDIT THIS CODE DIRECTLY! + # This code is automatically generated by the ply/ygen.py script. Make + # changes to the parsedebug() method instead. + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + def parseopt(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None): + #--! parseopt-start + lookahead = None # Current lookahead symbol + lookaheadstack = [] # Stack of lookahead symbols + actions = self.action # Local reference to action table (to avoid lookup on self.) + goto = self.goto # Local reference to goto table (to avoid lookup on self.) + prod = self.productions # Local reference to production list (to avoid lookup on self.) + defaulted_states = self.defaulted_states # Local reference to defaulted states + pslice = YaccProduction(None) # Production object passed to grammar rules + errorcount = 0 # Used during error recovery + + + # If no lexer was given, we will try to use the lex module + if not lexer: + from . 
import lex + lexer = lex.lexer + + # Set up the lexer and parser objects on pslice + pslice.lexer = lexer + pslice.parser = self + + # If input was supplied, pass to lexer + if input is not None: + lexer.input(input) + + if tokenfunc is None: + # Tokenize function + get_token = lexer.token + else: + get_token = tokenfunc + + # Set the parser() token method (sometimes used in error recovery) + self.token = get_token + + # Set up the state and symbol stacks + + statestack = [] # Stack of parsing states + self.statestack = statestack + symstack = [] # Stack of grammar symbols + self.symstack = symstack + + pslice.stack = symstack # Put in the production + errtoken = None # Err token + + # The start state is assumed to be (0,$end) + + statestack.append(0) + sym = YaccSymbol() + sym.type = '$end' + symstack.append(sym) + state = 0 + while True: + # Get the next symbol on the input. If a lookahead symbol + # is already set, we just use that. Otherwise, we'll pull + # the next token off of the lookaheadstack or from the lexer + + + if state not in defaulted_states: + if not lookahead: + if not lookaheadstack: + lookahead = get_token() # Get the next token + else: + lookahead = lookaheadstack.pop() + if not lookahead: + lookahead = YaccSymbol() + lookahead.type = '$end' + + # Check the action table + ltype = lookahead.type + t = actions[state].get(ltype) + else: + t = defaulted_states[state] + + + if t is not None: + if t > 0: + # shift a symbol on the stack + statestack.append(t) + state = t + + + symstack.append(lookahead) + lookahead = None + + # Decrease error count on successful shift + if errorcount: + errorcount -= 1 + continue + + if t < 0: + # reduce a symbol on the stack, emit a production + p = prod[-t] + pname = p.name + plen = p.len + + # Get production function + sym = YaccSymbol() + sym.type = pname # Production name + sym.value = None + + + if plen: + targ = symstack[-plen-1:] + targ[0] = sym + + #--! TRACKING + if tracking: + t1 = targ[1] + sym.lineno = t1.lineno + sym.lexpos = t1.lexpos + t1 = targ[-1] + sym.endlineno = getattr(t1, 'endlineno', t1.lineno) + sym.endlexpos = getattr(t1, 'endlexpos', t1.lexpos) + #--! TRACKING + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # below as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + del symstack[-plen:] + self.state = state + p.callable(pslice) + del statestack[-plen:] + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) # Save the current lookahead token + symstack.extend(targ[1:-1]) # Put the production slice back on the stack + statestack.pop() # Pop back one state (before the reduce) + state = statestack[-1] + sym.type = 'error' + sym.value = 'error' + lookahead = sym + errorcount = error_count + self.errorok = False + + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + else: + + #--! TRACKING + if tracking: + sym.lineno = lexer.lineno + sym.lexpos = lexer.lexpos + #--! TRACKING + + targ = [sym] + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # above as a performance optimization. Make sure + # changes get made in both locations. 
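+                    # For orientation, the user-side counterpart of what follows
+                    # (an illustrative sketch, not part of this module): pslice
+                    # becomes the `p` argument of a grammar action; p[0] is the
+                    # new symbol and p[1:] are the symbols being popped.
+                    #
+                    #   def p_expr_plus(p):
+                    #       'expr : expr PLUS term'
+                    #       p[0] = p[1] + p[3]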
+ + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + self.state = state + p.callable(pslice) + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) # Save the current lookahead token + statestack.pop() # Pop back one state (before the reduce) + state = statestack[-1] + sym.type = 'error' + sym.value = 'error' + lookahead = sym + errorcount = error_count + self.errorok = False + + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + if t == 0: + n = symstack[-1] + result = getattr(n, 'value', None) + return result + + if t is None: + + + # We have some kind of parsing error here. To handle + # this, we are going to push the current token onto + # the tokenstack and replace it with an 'error' token. + # If there are any synchronization rules, they may + # catch it. + # + # In addition to pushing the error token, we call call + # the user defined p_error() function if this is the + # first syntax error. This function is only called if + # errorcount == 0. + if errorcount == 0 or self.errorok: + errorcount = error_count + self.errorok = False + errtoken = lookahead + if errtoken.type == '$end': + errtoken = None # End of file! + if self.errorfunc: + if errtoken and not hasattr(errtoken, 'lexer'): + errtoken.lexer = lexer + self.state = state + tok = call_errorfunc(self.errorfunc, errtoken, self) + if self.errorok: + # User must have done some kind of panic + # mode recovery on their own. The + # returned token is the next lookahead + lookahead = tok + errtoken = None + continue + else: + if errtoken: + if hasattr(errtoken, 'lineno'): + lineno = lookahead.lineno + else: + lineno = 0 + if lineno: + sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type)) + else: + sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type) + else: + sys.stderr.write('yacc: Parse error in input. EOF\n') + return + + else: + errorcount = error_count + + # case 1: the statestack only has 1 entry on it. If we're in this state, the + # entire parse has been rolled back and we're completely hosed. The token is + # discarded and we just keep going. + + if len(statestack) <= 1 and lookahead.type != '$end': + lookahead = None + errtoken = None + state = 0 + # Nuke the pushback stack + del lookaheadstack[:] + continue + + # case 2: the statestack has a couple of entries on it, but we're + # at the end of the file. nuke the top entry and generate an error token + + # Start nuking entries on the stack + if lookahead.type == '$end': + # Whoa. We're really hosed here. Bail out + return + + if lookahead.type != 'error': + sym = symstack[-1] + if sym.type == 'error': + # Hmmm. Error is on top of stack, we'll just nuke input + # symbol and continue + #--! TRACKING + if tracking: + sym.endlineno = getattr(lookahead, 'lineno', sym.lineno) + sym.endlexpos = getattr(lookahead, 'lexpos', sym.lexpos) + #--! TRACKING + lookahead = None + continue + + # Create the error symbol for the first time and make it the new lookahead symbol + t = YaccSymbol() + t.type = 'error' + + if hasattr(lookahead, 'lineno'): + t.lineno = t.endlineno = lookahead.lineno + if hasattr(lookahead, 'lexpos'): + t.lexpos = t.endlexpos = lookahead.lexpos + t.value = lookahead + lookaheadstack.append(lookahead) + lookahead = t + else: + sym = symstack.pop() + #--! 
TRACKING + if tracking: + lookahead.lineno = sym.lineno + lookahead.lexpos = sym.lexpos + #--! TRACKING + statestack.pop() + state = statestack[-1] + + continue + + # Call an error function here + raise RuntimeError('yacc: internal parser error!!!\n') + + #--! parseopt-end + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # parseopt_notrack(). + # + # Optimized version of parseopt() with line number tracking removed. + # DO NOT EDIT THIS CODE DIRECTLY. This code is automatically generated + # by the ply/ygen.py script. Make changes to the parsedebug() method instead. + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + def parseopt_notrack(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None): + #--! parseopt-notrack-start + lookahead = None # Current lookahead symbol + lookaheadstack = [] # Stack of lookahead symbols + actions = self.action # Local reference to action table (to avoid lookup on self.) + goto = self.goto # Local reference to goto table (to avoid lookup on self.) + prod = self.productions # Local reference to production list (to avoid lookup on self.) + defaulted_states = self.defaulted_states # Local reference to defaulted states + pslice = YaccProduction(None) # Production object passed to grammar rules + errorcount = 0 # Used during error recovery + + + # If no lexer was given, we will try to use the lex module + if not lexer: + from . import lex + lexer = lex.lexer + + # Set up the lexer and parser objects on pslice + pslice.lexer = lexer + pslice.parser = self + + # If input was supplied, pass to lexer + if input is not None: + lexer.input(input) + + if tokenfunc is None: + # Tokenize function + get_token = lexer.token + else: + get_token = tokenfunc + + # Set the parser() token method (sometimes used in error recovery) + self.token = get_token + + # Set up the state and symbol stacks + + statestack = [] # Stack of parsing states + self.statestack = statestack + symstack = [] # Stack of grammar symbols + self.symstack = symstack + + pslice.stack = symstack # Put in the production + errtoken = None # Err token + + # The start state is assumed to be (0,$end) + + statestack.append(0) + sym = YaccSymbol() + sym.type = '$end' + symstack.append(sym) + state = 0 + while True: + # Get the next symbol on the input. If a lookahead symbol + # is already set, we just use that. Otherwise, we'll pull + # the next token off of the lookaheadstack or from the lexer + + + if state not in defaulted_states: + if not lookahead: + if not lookaheadstack: + lookahead = get_token() # Get the next token + else: + lookahead = lookaheadstack.pop() + if not lookahead: + lookahead = YaccSymbol() + lookahead.type = '$end' + + # Check the action table + ltype = lookahead.type + t = actions[state].get(ltype) + else: + t = defaulted_states[state] + + + if t is not None: + if t > 0: + # shift a symbol on the stack + statestack.append(t) + state = t + + + symstack.append(lookahead) + lookahead = None + + # Decrease error count on successful shift + if errorcount: + errorcount -= 1 + continue + + if t < 0: + # reduce a symbol on the stack, emit a production + p = prod[-t] + pname = p.name + plen = p.len + + # Get production function + sym = YaccSymbol() + sym.type = pname # Production name + sym.value = None + + + if plen: + targ = symstack[-plen-1:] + targ[0] = sym + + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # below as a performance optimization. 
Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + del symstack[-plen:] + self.state = state + p.callable(pslice) + del statestack[-plen:] + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) # Save the current lookahead token + symstack.extend(targ[1:-1]) # Put the production slice back on the stack + statestack.pop() # Pop back one state (before the reduce) + state = statestack[-1] + sym.type = 'error' + sym.value = 'error' + lookahead = sym + errorcount = error_count + self.errorok = False + + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + else: + + + targ = [sym] + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # above as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + self.state = state + p.callable(pslice) + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) # Save the current lookahead token + statestack.pop() # Pop back one state (before the reduce) + state = statestack[-1] + sym.type = 'error' + sym.value = 'error' + lookahead = sym + errorcount = error_count + self.errorok = False + + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + if t == 0: + n = symstack[-1] + result = getattr(n, 'value', None) + return result + + if t is None: + + + # We have some kind of parsing error here. To handle + # this, we are going to push the current token onto + # the tokenstack and replace it with an 'error' token. + # If there are any synchronization rules, they may + # catch it. + # + # In addition to pushing the error token, we call call + # the user defined p_error() function if this is the + # first syntax error. This function is only called if + # errorcount == 0. + if errorcount == 0 or self.errorok: + errorcount = error_count + self.errorok = False + errtoken = lookahead + if errtoken.type == '$end': + errtoken = None # End of file! + if self.errorfunc: + if errtoken and not hasattr(errtoken, 'lexer'): + errtoken.lexer = lexer + self.state = state + tok = call_errorfunc(self.errorfunc, errtoken, self) + if self.errorok: + # User must have done some kind of panic + # mode recovery on their own. The + # returned token is the next lookahead + lookahead = tok + errtoken = None + continue + else: + if errtoken: + if hasattr(errtoken, 'lineno'): + lineno = lookahead.lineno + else: + lineno = 0 + if lineno: + sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type)) + else: + sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type) + else: + sys.stderr.write('yacc: Parse error in input. EOF\n') + return + + else: + errorcount = error_count + + # case 1: the statestack only has 1 entry on it. If we're in this state, the + # entire parse has been rolled back and we're completely hosed. The token is + # discarded and we just keep going. 
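+            # Grammar-level resynchronization hinges on rules that name the
+            # special 'error' terminal, e.g. (user-side sketch; token names
+            # hypothetical):
+            #
+            #   def p_statement_error(p):
+            #       'statement : error SEMI'
+            #       print('Discarded a malformed statement')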
+ + if len(statestack) <= 1 and lookahead.type != '$end': + lookahead = None + errtoken = None + state = 0 + # Nuke the pushback stack + del lookaheadstack[:] + continue + + # case 2: the statestack has a couple of entries on it, but we're + # at the end of the file. nuke the top entry and generate an error token + + # Start nuking entries on the stack + if lookahead.type == '$end': + # Whoa. We're really hosed here. Bail out + return + + if lookahead.type != 'error': + sym = symstack[-1] + if sym.type == 'error': + # Hmmm. Error is on top of stack, we'll just nuke input + # symbol and continue + lookahead = None + continue + + # Create the error symbol for the first time and make it the new lookahead symbol + t = YaccSymbol() + t.type = 'error' + + if hasattr(lookahead, 'lineno'): + t.lineno = t.endlineno = lookahead.lineno + if hasattr(lookahead, 'lexpos'): + t.lexpos = t.endlexpos = lookahead.lexpos + t.value = lookahead + lookaheadstack.append(lookahead) + lookahead = t + else: + sym = symstack.pop() + statestack.pop() + state = statestack[-1] + + continue + + # Call an error function here + raise RuntimeError('yacc: internal parser error!!!\n') + + #--! parseopt-notrack-end + +# ----------------------------------------------------------------------------- +# === Grammar Representation === +# +# The following functions, classes, and variables are used to represent and +# manipulate the rules that make up a grammar. +# ----------------------------------------------------------------------------- + +# regex matching identifiers +_is_identifier = re.compile(r'^[a-zA-Z0-9_-]+$') + +# ----------------------------------------------------------------------------- +# class Production: +# +# This class stores the raw information about a single production or grammar rule. +# A grammar rule refers to a specification such as this: +# +# expr : expr PLUS term +# +# Here are the basic attributes defined on all productions +# +# name - Name of the production. For example 'expr' +# prod - A list of symbols on the right side ['expr','PLUS','term'] +# prec - Production precedence level +# number - Production number. +# func - Function that executes on reduce +# file - File where production function is defined +# lineno - Line number where production function is defined +# +# The following attributes are defined or optional. 
+# +# len - Length of the production (number of symbols on right hand side) +# usyms - Set of unique symbols found in the production +# ----------------------------------------------------------------------------- + +class Production(object): + reduced = 0 + def __init__(self, number, name, prod, precedence=('right', 0), func=None, file='', line=0): + self.name = name + self.prod = tuple(prod) + self.number = number + self.func = func + self.callable = None + self.file = file + self.line = line + self.prec = precedence + + # Internal settings used during table construction + + self.len = len(self.prod) # Length of the production + + # Create a list of unique production symbols used in the production + self.usyms = [] + for s in self.prod: + if s not in self.usyms: + self.usyms.append(s) + + # List of all LR items for the production + self.lr_items = [] + self.lr_next = None + + # Create a string representation + if self.prod: + self.str = '%s -> %s' % (self.name, ' '.join(self.prod)) + else: + self.str = '%s -> ' % self.name + + def __str__(self): + return self.str + + def __repr__(self): + return 'Production(' + str(self) + ')' + + def __len__(self): + return len(self.prod) + + def __nonzero__(self): + return 1 + + def __getitem__(self, index): + return self.prod[index] + + # Return the nth lr_item from the production (or None if at the end) + def lr_item(self, n): + if n > len(self.prod): + return None + p = LRItem(self, n) + # Precompute the list of productions immediately following. + try: + p.lr_after = Prodnames[p.prod[n+1]] + except (IndexError, KeyError): + p.lr_after = [] + try: + p.lr_before = p.prod[n-1] + except IndexError: + p.lr_before = None + return p + + # Bind the production function name to a callable + def bind(self, pdict): + if self.func: + self.callable = pdict[self.func] + +# This class serves as a minimal standin for Production objects when +# reading table data from files. It only contains information +# actually used by the LR parsing engine, plus some additional +# debugging information. +class MiniProduction(object): + def __init__(self, str, name, len, func, file, line): + self.name = name + self.len = len + self.func = func + self.callable = None + self.file = file + self.line = line + self.str = str + + def __str__(self): + return self.str + + def __repr__(self): + return 'MiniProduction(%s)' % self.str + + # Bind the production function name to a callable + def bind(self, pdict): + if self.func: + self.callable = pdict[self.func] + + +# ----------------------------------------------------------------------------- +# class LRItem +# +# This class represents a specific stage of parsing a production rule. For +# example: +# +# expr : expr . PLUS term +# +# In the above, the "." represents the current location of the parse. Here +# basic attributes: +# +# name - Name of the production. For example 'expr' +# prod - A list of symbols on the right side ['expr','.', 'PLUS','term'] +# number - Production number. +# +# lr_next Next LR item. Example, if we are ' expr -> expr . PLUS term' +# then lr_next refers to 'expr -> expr PLUS . term' +# lr_index - LR item index (location of the ".") in the prod list. 
+# lookaheads - LALR lookahead symbols for this item +# len - Length of the production (number of symbols on right hand side) +# lr_after - List of all productions that immediately follow +# lr_before - Grammar symbol immediately before +# ----------------------------------------------------------------------------- + +class LRItem(object): + def __init__(self, p, n): + self.name = p.name + self.prod = list(p.prod) + self.number = p.number + self.lr_index = n + self.lookaheads = {} + self.prod.insert(n, '.') + self.prod = tuple(self.prod) + self.len = len(self.prod) + self.usyms = p.usyms + + def __str__(self): + if self.prod: + s = '%s -> %s' % (self.name, ' '.join(self.prod)) + else: + s = '%s -> ' % self.name + return s + + def __repr__(self): + return 'LRItem(' + str(self) + ')' + +# ----------------------------------------------------------------------------- +# rightmost_terminal() +# +# Return the rightmost terminal from a list of symbols. Used in add_production() +# ----------------------------------------------------------------------------- +def rightmost_terminal(symbols, terminals): + i = len(symbols) - 1 + while i >= 0: + if symbols[i] in terminals: + return symbols[i] + i -= 1 + return None + +# ----------------------------------------------------------------------------- +# === GRAMMAR CLASS === +# +# The following class represents the contents of the specified grammar along +# with various computed properties such as first sets, follow sets, LR items, etc. +# This data is used for critical parts of the table generation process later. +# ----------------------------------------------------------------------------- + +class GrammarError(YaccError): + pass + +class Grammar(object): + def __init__(self, terminals): + self.Productions = [None] # A list of all of the productions. The first + # entry is always reserved for the purpose of + # building an augmented grammar + + self.Prodnames = {} # A dictionary mapping the names of nonterminals to a list of all + # productions of that nonterminal. + + self.Prodmap = {} # A dictionary that is only used to detect duplicate + # productions. + + self.Terminals = {} # A dictionary mapping the names of terminal symbols to a + # list of the rules where they are used. + + for term in terminals: + self.Terminals[term] = [] + + self.Terminals['error'] = [] + + self.Nonterminals = {} # A dictionary mapping names of nonterminals to a list + # of rule numbers where they are used. + + self.First = {} # A dictionary of precomputed FIRST(x) symbols + + self.Follow = {} # A dictionary of precomputed FOLLOW(x) symbols + + self.Precedence = {} # Precedence rules for each terminal. Contains tuples of the + # form ('right',level) or ('nonassoc', level) or ('left',level) + + self.UsedPrecedence = set() # Precedence rules that were actually used by the grammer. + # This is only used to provide error checking and to generate + # a warning about unused precedence rules. + + self.Start = None # Starting symbol for the grammar + + + def __len__(self): + return len(self.Productions) + + def __getitem__(self, index): + return self.Productions[index] + + # ----------------------------------------------------------------------------- + # set_precedence() + # + # Sets the precedence for a given terminal. assoc is the associativity such as + # 'left','right', or 'nonassoc'. level is a numeric level. 
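+    # For example, a two-level table where PLUS/MINUS bind looser than
+    # TIMES would be installed as (illustrative token names):
+    #
+    #   g.set_precedence('PLUS',  'left', 1)
+    #   g.set_precedence('MINUS', 'left', 1)
+    #   g.set_precedence('TIMES', 'left', 2)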
+ # + # ----------------------------------------------------------------------------- + + def set_precedence(self, term, assoc, level): + assert self.Productions == [None], 'Must call set_precedence() before add_production()' + if term in self.Precedence: + raise GrammarError('Precedence already specified for terminal %r' % term) + if assoc not in ['left', 'right', 'nonassoc']: + raise GrammarError("Associativity must be one of 'left','right', or 'nonassoc'") + self.Precedence[term] = (assoc, level) + + # ----------------------------------------------------------------------------- + # add_production() + # + # Given an action function, this function assembles a production rule and + # computes its precedence level. + # + # The production rule is supplied as a list of symbols. For example, + # a rule such as 'expr : expr PLUS term' has a production name of 'expr' and + # symbols ['expr','PLUS','term']. + # + # Precedence is determined by the precedence of the right-most non-terminal + # or the precedence of a terminal specified by %prec. + # + # A variety of error checks are performed to make sure production symbols + # are valid and that %prec is used correctly. + # ----------------------------------------------------------------------------- + + def add_production(self, prodname, syms, func=None, file='', line=0): + + if prodname in self.Terminals: + raise GrammarError('%s:%d: Illegal rule name %r. Already defined as a token' % (file, line, prodname)) + if prodname == 'error': + raise GrammarError('%s:%d: Illegal rule name %r. error is a reserved word' % (file, line, prodname)) + if not _is_identifier.match(prodname): + raise GrammarError('%s:%d: Illegal rule name %r' % (file, line, prodname)) + + # Look for literal tokens + for n, s in enumerate(syms): + if s[0] in "'\"": + try: + c = eval(s) + if (len(c) > 1): + raise GrammarError('%s:%d: Literal token %s in rule %r may only be a single character' % + (file, line, s, prodname)) + if c not in self.Terminals: + self.Terminals[c] = [] + syms[n] = c + continue + except SyntaxError: + pass + if not _is_identifier.match(s) and s != '%prec': + raise GrammarError('%s:%d: Illegal name %r in rule %r' % (file, line, s, prodname)) + + # Determine the precedence level + if '%prec' in syms: + if syms[-1] == '%prec': + raise GrammarError('%s:%d: Syntax error. Nothing follows %%prec' % (file, line)) + if syms[-2] != '%prec': + raise GrammarError('%s:%d: Syntax error. %%prec can only appear at the end of a grammar rule' % + (file, line)) + precname = syms[-1] + prodprec = self.Precedence.get(precname) + if not prodprec: + raise GrammarError('%s:%d: Nothing known about the precedence of %r' % (file, line, precname)) + else: + self.UsedPrecedence.add(precname) + del syms[-2:] # Drop %prec from the rule + else: + # If no %prec, precedence is determined by the rightmost terminal symbol + precname = rightmost_terminal(syms, self.Terminals) + prodprec = self.Precedence.get(precname, ('right', 0)) + + # See if the rule is already in the rulemap + map = '%s -> %s' % (prodname, syms) + if map in self.Prodmap: + m = self.Prodmap[map] + raise GrammarError('%s:%d: Duplicate rule %s. ' % (file, line, m) + + 'Previous definition at %s:%d' % (m.file, m.line)) + + # From this point on, everything is valid. 
Create a new Production instance + pnumber = len(self.Productions) + if prodname not in self.Nonterminals: + self.Nonterminals[prodname] = [] + + # Add the production number to Terminals and Nonterminals + for t in syms: + if t in self.Terminals: + self.Terminals[t].append(pnumber) + else: + if t not in self.Nonterminals: + self.Nonterminals[t] = [] + self.Nonterminals[t].append(pnumber) + + # Create a production and add it to the list of productions + p = Production(pnumber, prodname, syms, prodprec, func, file, line) + self.Productions.append(p) + self.Prodmap[map] = p + + # Add to the global productions list + try: + self.Prodnames[prodname].append(p) + except KeyError: + self.Prodnames[prodname] = [p] + + # ----------------------------------------------------------------------------- + # set_start() + # + # Sets the starting symbol and creates the augmented grammar. Production + # rule 0 is S' -> start where start is the start symbol. + # ----------------------------------------------------------------------------- + + def set_start(self, start=None): + if not start: + start = self.Productions[1].name + if start not in self.Nonterminals: + raise GrammarError('start symbol %s undefined' % start) + self.Productions[0] = Production(0, "S'", [start]) + self.Nonterminals[start].append(0) + self.Start = start + + # ----------------------------------------------------------------------------- + # find_unreachable() + # + # Find all of the nonterminal symbols that can't be reached from the starting + # symbol. Returns a list of nonterminals that can't be reached. + # ----------------------------------------------------------------------------- + + def find_unreachable(self): + + # Mark all symbols that are reachable from a symbol s + def mark_reachable_from(s): + if s in reachable: + return + reachable.add(s) + for p in self.Prodnames.get(s, []): + for r in p.prod: + mark_reachable_from(r) + + reachable = set() + mark_reachable_from(self.Productions[0].prod[0]) + return [s for s in self.Nonterminals if s not in reachable] + + # ----------------------------------------------------------------------------- + # infinite_cycles() + # + # This function looks at the various parsing rules and tries to detect + # infinite recursion cycles (grammar rules where there is no possible way + # to derive a string of only terminals). + # ----------------------------------------------------------------------------- + + def infinite_cycles(self): + terminates = {} + + # Terminals: + for t in self.Terminals: + terminates[t] = True + + terminates['$end'] = True + + # Nonterminals: + + # Initialize to false: + for n in self.Nonterminals: + terminates[n] = False + + # Then propagate termination until no change: + while True: + some_change = False + for (n, pl) in self.Prodnames.items(): + # Nonterminal n terminates iff any of its productions terminates. + for p in pl: + # Production p terminates iff all of its rhs symbols terminate. + for s in p.prod: + if not terminates[s]: + # The symbol s does not terminate, + # so production p does not terminate. + p_terminates = False + break + else: + # didn't break from the loop, + # so every symbol s terminates + # so production p terminates. + p_terminates = True + + if p_terminates: + # symbol n terminates! + if not terminates[n]: + terminates[n] = True + some_change = True + # Don't need to consider any more productions for this n. 
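+                        # (e.g. a nonterminal whose only rule is  a : a X  can
+                        #  never be marked as terminating, so it is reported by
+                        #  infinite_cycles() as non-terminating)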
+ break + + if not some_change: + break + + infinite = [] + for (s, term) in terminates.items(): + if not term: + if s not in self.Prodnames and s not in self.Terminals and s != 'error': + # s is used-but-not-defined, and we've already warned of that, + # so it would be overkill to say that it's also non-terminating. + pass + else: + infinite.append(s) + + return infinite + + # ----------------------------------------------------------------------------- + # undefined_symbols() + # + # Find all symbols that were used the grammar, but not defined as tokens or + # grammar rules. Returns a list of tuples (sym, prod) where sym in the symbol + # and prod is the production where the symbol was used. + # ----------------------------------------------------------------------------- + def undefined_symbols(self): + result = [] + for p in self.Productions: + if not p: + continue + + for s in p.prod: + if s not in self.Prodnames and s not in self.Terminals and s != 'error': + result.append((s, p)) + return result + + # ----------------------------------------------------------------------------- + # unused_terminals() + # + # Find all terminals that were defined, but not used by the grammar. Returns + # a list of all symbols. + # ----------------------------------------------------------------------------- + def unused_terminals(self): + unused_tok = [] + for s, v in self.Terminals.items(): + if s != 'error' and not v: + unused_tok.append(s) + + return unused_tok + + # ------------------------------------------------------------------------------ + # unused_rules() + # + # Find all grammar rules that were defined, but not used (maybe not reachable) + # Returns a list of productions. + # ------------------------------------------------------------------------------ + + def unused_rules(self): + unused_prod = [] + for s, v in self.Nonterminals.items(): + if not v: + p = self.Prodnames[s][0] + unused_prod.append(p) + return unused_prod + + # ----------------------------------------------------------------------------- + # unused_precedence() + # + # Returns a list of tuples (term,precedence) corresponding to precedence + # rules that were never used by the grammar. term is the name of the terminal + # on which precedence was applied and precedence is a string such as 'left' or + # 'right' corresponding to the type of precedence. + # ----------------------------------------------------------------------------- + + def unused_precedence(self): + unused = [] + for termname in self.Precedence: + if not (termname in self.Terminals or termname in self.UsedPrecedence): + unused.append((termname, self.Precedence[termname][0])) + + return unused + + # ------------------------------------------------------------------------- + # _first() + # + # Compute the value of FIRST1(beta) where beta is a tuple of symbols. + # + # During execution of compute_first1, the result may be incomplete. + # Afterward (e.g., when called from compute_follow()), it will be complete. + # ------------------------------------------------------------------------- + def _first(self, beta): + + # We are computing First(x1,x2,x3,...,xn) + result = [] + for x in beta: + x_produces_empty = False + + # Add all the non- symbols of First[x] to the result. + for f in self.First[x]: + if f == '': + x_produces_empty = True + else: + if f not in result: + result.append(f) + + if x_produces_empty: + # We have to consider the next x in beta, + # i.e. stay in the loop. + pass + else: + # We don't have to consider any further symbols in beta. 
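+                # (e.g. for beta = (A, b) with A nullable, the result is
+                #  FIRST(A) minus '' plus b; '' itself is appended only if
+                #  every symbol of beta can derive the empty string)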
+ break + else: + # There was no 'break' from the loop, + # so x_produces_empty was true for all x in beta, + # so beta produces empty as well. + result.append('') + + return result + + # ------------------------------------------------------------------------- + # compute_first() + # + # Compute the value of FIRST1(X) for all symbols + # ------------------------------------------------------------------------- + def compute_first(self): + if self.First: + return self.First + + # Terminals: + for t in self.Terminals: + self.First[t] = [t] + + self.First['$end'] = ['$end'] + + # Nonterminals: + + # Initialize to the empty set: + for n in self.Nonterminals: + self.First[n] = [] + + # Then propagate symbols until no change: + while True: + some_change = False + for n in self.Nonterminals: + for p in self.Prodnames[n]: + for f in self._first(p.prod): + if f not in self.First[n]: + self.First[n].append(f) + some_change = True + if not some_change: + break + + return self.First + + # --------------------------------------------------------------------- + # compute_follow() + # + # Computes all of the follow sets for every non-terminal symbol. The + # follow set is the set of all symbols that might follow a given + # non-terminal. See the Dragon book, 2nd Ed. p. 189. + # --------------------------------------------------------------------- + def compute_follow(self, start=None): + # If already computed, return the result + if self.Follow: + return self.Follow + + # If first sets not computed yet, do that first. + if not self.First: + self.compute_first() + + # Add '$end' to the follow list of the start symbol + for k in self.Nonterminals: + self.Follow[k] = [] + + if not start: + start = self.Productions[1].name + + self.Follow[start] = ['$end'] + + while True: + didadd = False + for p in self.Productions[1:]: + # Here is the production set + for i, B in enumerate(p.prod): + if B in self.Nonterminals: + # Okay. We got a non-terminal in a production + fst = self._first(p.prod[i+1:]) + hasempty = False + for f in fst: + if f != '' and f not in self.Follow[B]: + self.Follow[B].append(f) + didadd = True + if f == '': + hasempty = True + if hasempty or i == (len(p.prod)-1): + # Add elements of follow(a) to follow(b) + for f in self.Follow[p.name]: + if f not in self.Follow[B]: + self.Follow[B].append(f) + didadd = True + if not didadd: + break + return self.Follow + + + # ----------------------------------------------------------------------------- + # build_lritems() + # + # This function walks the list of productions and builds a complete set of the + # LR items. The LR items are stored in two ways: First, they are uniquely + # numbered and placed in the list _lritems. Second, a linked list of LR items + # is built for each production. For example: + # + # E -> E PLUS E + # + # Creates the list + # + # [E -> . E PLUS E, E -> E . PLUS E, E -> E PLUS . E, E -> E PLUS E . 
] + # ----------------------------------------------------------------------------- + + def build_lritems(self): + for p in self.Productions: + lastlri = p + i = 0 + lr_items = [] + while True: + if i > len(p): + lri = None + else: + lri = LRItem(p, i) + # Precompute the list of productions immediately following + try: + lri.lr_after = self.Prodnames[lri.prod[i+1]] + except (IndexError, KeyError): + lri.lr_after = [] + try: + lri.lr_before = lri.prod[i-1] + except IndexError: + lri.lr_before = None + + lastlri.lr_next = lri + if not lri: + break + lr_items.append(lri) + lastlri = lri + i += 1 + p.lr_items = lr_items + +# ----------------------------------------------------------------------------- +# == Class LRTable == +# +# This basic class represents a basic table of LR parsing information. +# Methods for generating the tables are not defined here. They are defined +# in the derived class LRGeneratedTable. +# ----------------------------------------------------------------------------- + +class VersionError(YaccError): + pass + +class LRTable(object): + def __init__(self): + self.lr_action = None + self.lr_goto = None + self.lr_productions = None + self.lr_method = None + + def read_table(self, module): + if isinstance(module, types.ModuleType): + parsetab = module + else: + exec('import %s' % module) + parsetab = sys.modules[module] + + if parsetab._tabversion != __tabversion__: + raise VersionError('yacc table file version is out of date') + + self.lr_action = parsetab._lr_action + self.lr_goto = parsetab._lr_goto + + self.lr_productions = [] + for p in parsetab._lr_productions: + self.lr_productions.append(MiniProduction(*p)) + + self.lr_method = parsetab._lr_method + return parsetab._lr_signature + + def read_pickle(self, filename): + try: + import cPickle as pickle + except ImportError: + import pickle + + if not os.path.exists(filename): + raise ImportError + + in_f = open(filename, 'rb') + + tabversion = pickle.load(in_f) + if tabversion != __tabversion__: + raise VersionError('yacc table file version is out of date') + self.lr_method = pickle.load(in_f) + signature = pickle.load(in_f) + self.lr_action = pickle.load(in_f) + self.lr_goto = pickle.load(in_f) + productions = pickle.load(in_f) + + self.lr_productions = [] + for p in productions: + self.lr_productions.append(MiniProduction(*p)) + + in_f.close() + return signature + + # Bind all production function names to callable objects in pdict + def bind_callables(self, pdict): + for p in self.lr_productions: + p.bind(pdict) + + +# ----------------------------------------------------------------------------- +# === LR Generator === +# +# The following classes and functions are used to generate LR parsing tables on +# a grammar. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# digraph() +# traverse() +# +# The following two functions are used to compute set valued functions +# of the form: +# +# F(x) = F'(x) U U{F(y) | x R y} +# +# This is used to compute the values of Read() sets as well as FOLLOW sets +# in LALR(1) generation. 
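+#
+# A tiny worked example: with X = {a, b}, R(a) = [b], R(b) = [],
+# FP(a) = ['s'] and FP(b) = ['t'], the traversal yields F(b) = ['t'] and
+# F(a) = ['s', 't'], since a R b makes F(a) absorb all of F(b).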
+# +# Inputs: X - An input set +# R - A relation +# FP - Set-valued function +# ------------------------------------------------------------------------------ + +def digraph(X, R, FP): + N = {} + for x in X: + N[x] = 0 + stack = [] + F = {} + for x in X: + if N[x] == 0: + traverse(x, N, stack, F, X, R, FP) + return F + +def traverse(x, N, stack, F, X, R, FP): + stack.append(x) + d = len(stack) + N[x] = d + F[x] = FP(x) # F(X) <- F'(x) + + rel = R(x) # Get y's related to x + for y in rel: + if N[y] == 0: + traverse(y, N, stack, F, X, R, FP) + N[x] = min(N[x], N[y]) + for a in F.get(y, []): + if a not in F[x]: + F[x].append(a) + if N[x] == d: + N[stack[-1]] = MAXINT + F[stack[-1]] = F[x] + element = stack.pop() + while element != x: + N[stack[-1]] = MAXINT + F[stack[-1]] = F[x] + element = stack.pop() + +class LALRError(YaccError): + pass + +# ----------------------------------------------------------------------------- +# == LRGeneratedTable == +# +# This class implements the LR table generation algorithm. There are no +# public methods except for write() +# ----------------------------------------------------------------------------- + +class LRGeneratedTable(LRTable): + def __init__(self, grammar, method='LALR', log=None): + if method not in ['SLR', 'LALR']: + raise LALRError('Unsupported method %s' % method) + + self.grammar = grammar + self.lr_method = method + + # Set up the logger + if not log: + log = NullLogger() + self.log = log + + # Internal attributes + self.lr_action = {} # Action table + self.lr_goto = {} # Goto table + self.lr_productions = grammar.Productions # Copy of grammar Production array + self.lr_goto_cache = {} # Cache of computed gotos + self.lr0_cidhash = {} # Cache of closures + + self._add_count = 0 # Internal counter used to detect cycles + + # Diagonistic information filled in by the table generator + self.sr_conflict = 0 + self.rr_conflict = 0 + self.conflicts = [] # List of conflicts + + self.sr_conflicts = [] + self.rr_conflicts = [] + + # Build the tables + self.grammar.build_lritems() + self.grammar.compute_first() + self.grammar.compute_follow() + self.lr_parse_table() + + # Compute the LR(0) closure operation on I, where I is a set of LR(0) items. + + def lr0_closure(self, I): + self._add_count += 1 + + # Add everything in I to J + J = I[:] + didadd = True + while didadd: + didadd = False + for j in J: + for x in j.lr_after: + if getattr(x, 'lr0_added', 0) == self._add_count: + continue + # Add B --> .G to J + J.append(x.lr_next) + x.lr0_added = self._add_count + didadd = True + + return J + + # Compute the LR(0) goto function goto(I,X) where I is a set + # of LR(0) items and X is a grammar symbol. This function is written + # in a way that guarantees uniqueness of the generated goto sets + # (i.e. the same goto set will never be returned as two different Python + # objects). With uniqueness, we can later do fast set comparisons using + # id(obj) instead of element-wise comparison. 
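+    # Concretely, for a fixed I and x the very same list object comes back
+    # on every call, so
+    #
+    #   g1 = self.lr0_goto(I, x)
+    #   g2 = self.lr0_goto(I, x)
+    #   assert g1 is g2
+    #
+    # holds, which is what lets lr0_items() key its caches on id(g).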
+ + def lr0_goto(self, I, x): + # First we look for a previously cached entry + g = self.lr_goto_cache.get((id(I), x)) + if g: + return g + + # Now we generate the goto set in a way that guarantees uniqueness + # of the result + + s = self.lr_goto_cache.get(x) + if not s: + s = {} + self.lr_goto_cache[x] = s + + gs = [] + for p in I: + n = p.lr_next + if n and n.lr_before == x: + s1 = s.get(id(n)) + if not s1: + s1 = {} + s[id(n)] = s1 + gs.append(n) + s = s1 + g = s.get('$end') + if not g: + if gs: + g = self.lr0_closure(gs) + s['$end'] = g + else: + s['$end'] = gs + self.lr_goto_cache[(id(I), x)] = g + return g + + # Compute the LR(0) sets of item function + def lr0_items(self): + C = [self.lr0_closure([self.grammar.Productions[0].lr_next])] + i = 0 + for I in C: + self.lr0_cidhash[id(I)] = i + i += 1 + + # Loop over the items in C and each grammar symbols + i = 0 + while i < len(C): + I = C[i] + i += 1 + + # Collect all of the symbols that could possibly be in the goto(I,X) sets + asyms = {} + for ii in I: + for s in ii.usyms: + asyms[s] = None + + for x in asyms: + g = self.lr0_goto(I, x) + if not g or id(g) in self.lr0_cidhash: + continue + self.lr0_cidhash[id(g)] = len(C) + C.append(g) + + return C + + # ----------------------------------------------------------------------------- + # ==== LALR(1) Parsing ==== + # + # LALR(1) parsing is almost exactly the same as SLR except that instead of + # relying upon Follow() sets when performing reductions, a more selective + # lookahead set that incorporates the state of the LR(0) machine is utilized. + # Thus, we mainly just have to focus on calculating the lookahead sets. + # + # The method used here is due to DeRemer and Pennelo (1982). + # + # DeRemer, F. L., and T. J. Pennelo: "Efficient Computation of LALR(1) + # Lookahead Sets", ACM Transactions on Programming Languages and Systems, + # Vol. 4, No. 4, Oct. 1982, pp. 615-649 + # + # Further details can also be found in: + # + # J. Tremblay and P. Sorenson, "The Theory and Practice of Compiler Writing", + # McGraw-Hill Book Company, (1985). + # + # ----------------------------------------------------------------------------- + + # ----------------------------------------------------------------------------- + # compute_nullable_nonterminals() + # + # Creates a dictionary containing all of the non-terminals that might produce + # an empty production. + # ----------------------------------------------------------------------------- + + def compute_nullable_nonterminals(self): + nullable = set() + num_nullable = 0 + while True: + for p in self.grammar.Productions[1:]: + if p.len == 0: + nullable.add(p.name) + continue + for t in p.prod: + if t not in nullable: + break + else: + nullable.add(p.name) + if len(nullable) == num_nullable: + break + num_nullable = len(nullable) + return nullable + + # ----------------------------------------------------------------------------- + # find_nonterminal_trans(C) + # + # Given a set of LR(0) items, this functions finds all of the non-terminal + # transitions. These are transitions in which a dot appears immediately before + # a non-terminal. Returns a list of tuples of the form (state,N) where state + # is the state number and N is the nonterminal symbol. + # + # The input C is the set of LR(0) items. 
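+    # For instance, an item  E -> E . PLUS term  contributes nothing here
+    # (PLUS is a terminal), while  S' -> . S  in state 0 contributes the
+    # nonterminal transition (0, 'S').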
+ # ----------------------------------------------------------------------------- + + def find_nonterminal_transitions(self, C): + trans = [] + for stateno, state in enumerate(C): + for p in state: + if p.lr_index < p.len - 1: + t = (stateno, p.prod[p.lr_index+1]) + if t[1] in self.grammar.Nonterminals: + if t not in trans: + trans.append(t) + return trans + + # ----------------------------------------------------------------------------- + # dr_relation() + # + # Computes the DR(p,A) relationships for non-terminal transitions. The input + # is a tuple (state,N) where state is a number and N is a nonterminal symbol. + # + # Returns a list of terminals. + # ----------------------------------------------------------------------------- + + def dr_relation(self, C, trans, nullable): + dr_set = {} + state, N = trans + terms = [] + + g = self.lr0_goto(C[state], N) + for p in g: + if p.lr_index < p.len - 1: + a = p.prod[p.lr_index+1] + if a in self.grammar.Terminals: + if a not in terms: + terms.append(a) + + # This extra bit is to handle the start state + if state == 0 and N == self.grammar.Productions[0].prod[0]: + terms.append('$end') + + return terms + + # ----------------------------------------------------------------------------- + # reads_relation() + # + # Computes the READS() relation (p,A) READS (t,C). + # ----------------------------------------------------------------------------- + + def reads_relation(self, C, trans, empty): + # Look for empty transitions + rel = [] + state, N = trans + + g = self.lr0_goto(C[state], N) + j = self.lr0_cidhash.get(id(g), -1) + for p in g: + if p.lr_index < p.len - 1: + a = p.prod[p.lr_index + 1] + if a in empty: + rel.append((j, a)) + + return rel + + # ----------------------------------------------------------------------------- + # compute_lookback_includes() + # + # Determines the lookback and includes relations + # + # LOOKBACK: + # + # This relation is determined by running the LR(0) state machine forward. + # For example, starting with a production "N : . A B C", we run it forward + # to obtain "N : A B C ." We then build a relationship between this final + # state and the starting state. These relationships are stored in a dictionary + # lookdict. + # + # INCLUDES: + # + # Computes the INCLUDE() relation (p,A) INCLUDES (p',B). + # + # This relation is used to determine non-terminal transitions that occur + # inside of other non-terminal transition states. (p,A) INCLUDES (p', B) + # if the following holds: + # + # B -> LAT, where T -> epsilon and p' -L-> p + # + # L is essentially a prefix (which may be empty), T is a suffix that must be + # able to derive an empty string. State p' must lead to state p with the string L. + # + # ----------------------------------------------------------------------------- + + def compute_lookback_includes(self, C, trans, nullable): + lookdict = {} # Dictionary of lookback relations + includedict = {} # Dictionary of include relations + + # Make a dictionary of non-terminal transitions + dtrans = {} + for t in trans: + dtrans[t] = 1 + + # Loop over all transitions and compute lookbacks and includes + for state, N in trans: + lookb = [] + includes = [] + for p in C[state]: + if p.name != N: + continue + + # Okay, we have a name match. We now follow the production all the way + # through the state machine until we get the . 
on the right hand side + + lr_index = p.lr_index + j = state + while lr_index < p.len - 1: + lr_index = lr_index + 1 + t = p.prod[lr_index] + + # Check to see if this symbol and state are a non-terminal transition + if (j, t) in dtrans: + # Yes. Okay, there is some chance that this is an includes relation + # the only way to know for certain is whether the rest of the + # production derives empty + + li = lr_index + 1 + while li < p.len: + if p.prod[li] in self.grammar.Terminals: + break # No forget it + if p.prod[li] not in nullable: + break + li = li + 1 + else: + # Appears to be a relation between (j,t) and (state,N) + includes.append((j, t)) + + g = self.lr0_goto(C[j], t) # Go to next set + j = self.lr0_cidhash.get(id(g), -1) # Go to next state + + # When we get here, j is the final state, now we have to locate the production + for r in C[j]: + if r.name != p.name: + continue + if r.len != p.len: + continue + i = 0 + # This look is comparing a production ". A B C" with "A B C ." + while i < r.lr_index: + if r.prod[i] != p.prod[i+1]: + break + i = i + 1 + else: + lookb.append((j, r)) + for i in includes: + if i not in includedict: + includedict[i] = [] + includedict[i].append((state, N)) + lookdict[(state, N)] = lookb + + return lookdict, includedict + + # ----------------------------------------------------------------------------- + # compute_read_sets() + # + # Given a set of LR(0) items, this function computes the read sets. + # + # Inputs: C = Set of LR(0) items + # ntrans = Set of nonterminal transitions + # nullable = Set of empty transitions + # + # Returns a set containing the read sets + # ----------------------------------------------------------------------------- + + def compute_read_sets(self, C, ntrans, nullable): + FP = lambda x: self.dr_relation(C, x, nullable) + R = lambda x: self.reads_relation(C, x, nullable) + F = digraph(ntrans, R, FP) + return F + + # ----------------------------------------------------------------------------- + # compute_follow_sets() + # + # Given a set of LR(0) items, a set of non-terminal transitions, a readset, + # and an include set, this function computes the follow sets + # + # Follow(p,A) = Read(p,A) U U {Follow(p',B) | (p,A) INCLUDES (p',B)} + # + # Inputs: + # ntrans = Set of nonterminal transitions + # readsets = Readset (previously computed) + # inclsets = Include sets (previously computed) + # + # Returns a set containing the follow sets + # ----------------------------------------------------------------------------- + + def compute_follow_sets(self, ntrans, readsets, inclsets): + FP = lambda x: readsets[x] + R = lambda x: inclsets.get(x, []) + F = digraph(ntrans, R, FP) + return F + + # ----------------------------------------------------------------------------- + # add_lookaheads() + # + # Attaches the lookahead symbols to grammar rules. 
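+    # The attached data has this shape (state number and token names
+    # illustrative):
+    #
+    #   p.lookaheads = {7: ['PLUS', '$end']}
+    #
+    # i.e. production p may be reduced in state 7 only on PLUS or $end.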
+ # + # Inputs: lookbacks - Set of lookback relations + # followset - Computed follow set + # + # This function directly attaches the lookaheads to productions contained + # in the lookbacks set + # ----------------------------------------------------------------------------- + + def add_lookaheads(self, lookbacks, followset): + for trans, lb in lookbacks.items(): + # Loop over productions in lookback + for state, p in lb: + if state not in p.lookaheads: + p.lookaheads[state] = [] + f = followset.get(trans, []) + for a in f: + if a not in p.lookaheads[state]: + p.lookaheads[state].append(a) + + # ----------------------------------------------------------------------------- + # add_lalr_lookaheads() + # + # This function does all of the work of adding lookahead information for use + # with LALR parsing + # ----------------------------------------------------------------------------- + + def add_lalr_lookaheads(self, C): + # Determine all of the nullable nonterminals + nullable = self.compute_nullable_nonterminals() + + # Find all non-terminal transitions + trans = self.find_nonterminal_transitions(C) + + # Compute read sets + readsets = self.compute_read_sets(C, trans, nullable) + + # Compute lookback/includes relations + lookd, included = self.compute_lookback_includes(C, trans, nullable) + + # Compute LALR FOLLOW sets + followsets = self.compute_follow_sets(trans, readsets, included) + + # Add all of the lookaheads + self.add_lookaheads(lookd, followsets) + + # ----------------------------------------------------------------------------- + # lr_parse_table() + # + # This function constructs the parse tables for SLR or LALR + # ----------------------------------------------------------------------------- + def lr_parse_table(self): + Productions = self.grammar.Productions + Precedence = self.grammar.Precedence + goto = self.lr_goto # Goto array + action = self.lr_action # Action array + log = self.log # Logger for output + + actionp = {} # Action production array (temporary) + + log.info('Parsing method: %s', self.lr_method) + + # Step 1: Construct C = { I0, I1, ... IN}, collection of LR(0) items + # This determines the number of states + + C = self.lr0_items() + + if self.lr_method == 'LALR': + self.add_lalr_lookaheads(C) + + # Build the parser table, state by state + st = 0 + for I in C: + # Loop over each production in I + actlist = [] # List of actions + st_action = {} + st_actionp = {} + st_goto = {} + log.info('') + log.info('state %d', st) + log.info('') + for p in I: + log.info(' (%d) %s', p.number, p) + log.info('') + + for p in I: + if p.len == p.lr_index + 1: + if p.name == "S'": + # Start symbol. Accept! + st_action['$end'] = 0 + st_actionp['$end'] = p + else: + # We are at the end of a production. Reduce! + if self.lr_method == 'LALR': + laheads = p.lookaheads[st] + else: + laheads = self.grammar.Follow[p.name] + for a in laheads: + actlist.append((a, p, 'reduce using rule %d (%s)' % (p.number, p))) + r = st_action.get(a) + if r is not None: + # Whoa. Have a shift/reduce or reduce/reduce conflict + if r > 0: + # Need to decide on shift or reduce here + # By default we favor shifting. Need to add + # some precedence rules here. + + # Shift precedence comes from the token + sprec, slevel = Precedence.get(a, ('right', 0)) + + # Reduce precedence comes from rule being reduced (p) + rprec, rlevel = Productions[p.number].prec + + if (slevel < rlevel) or ((slevel == rlevel) and (rprec == 'left')): + # We really need to reduce here. 
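+                                # e.g. for  expr : expr PLUS expr  with PLUS
+                                # declared ('left', 1): slevel == rlevel == 1
+                                # and rprec == 'left', so in '1 + 2 + 3' the
+                                # first sum is reduced before the second PLUS
+                                # is shifted -- left associativity in action.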
+ st_action[a] = -p.number + st_actionp[a] = p + if not slevel and not rlevel: + log.info(' ! shift/reduce conflict for %s resolved as reduce', a) + self.sr_conflicts.append((st, a, 'reduce')) + Productions[p.number].reduced += 1 + elif (slevel == rlevel) and (rprec == 'nonassoc'): + st_action[a] = None + else: + # Hmmm. Guess we'll keep the shift + if not rlevel: + log.info(' ! shift/reduce conflict for %s resolved as shift', a) + self.sr_conflicts.append((st, a, 'shift')) + elif r < 0: + # Reduce/reduce conflict. In this case, we favor the rule + # that was defined first in the grammar file + oldp = Productions[-r] + pp = Productions[p.number] + if oldp.line > pp.line: + st_action[a] = -p.number + st_actionp[a] = p + chosenp, rejectp = pp, oldp + Productions[p.number].reduced += 1 + Productions[oldp.number].reduced -= 1 + else: + chosenp, rejectp = oldp, pp + self.rr_conflicts.append((st, chosenp, rejectp)) + log.info(' ! reduce/reduce conflict for %s resolved using rule %d (%s)', + a, st_actionp[a].number, st_actionp[a]) + else: + raise LALRError('Unknown conflict in state %d' % st) + else: + st_action[a] = -p.number + st_actionp[a] = p + Productions[p.number].reduced += 1 + else: + i = p.lr_index + a = p.prod[i+1] # Get symbol right after the "." + if a in self.grammar.Terminals: + g = self.lr0_goto(I, a) + j = self.lr0_cidhash.get(id(g), -1) + if j >= 0: + # We are in a shift state + actlist.append((a, p, 'shift and go to state %d' % j)) + r = st_action.get(a) + if r is not None: + # Whoa have a shift/reduce or shift/shift conflict + if r > 0: + if r != j: + raise LALRError('Shift/shift conflict in state %d' % st) + elif r < 0: + # Do a precedence check. + # - if precedence of reduce rule is higher, we reduce. + # - if precedence of reduce is same and left assoc, we reduce. + # - otherwise we shift + + # Shift precedence comes from the token + sprec, slevel = Precedence.get(a, ('right', 0)) + + # Reduce precedence comes from the rule that could have been reduced + rprec, rlevel = Productions[st_actionp[a].number].prec + + if (slevel > rlevel) or ((slevel == rlevel) and (rprec == 'right')): + # We decide to shift here... highest precedence to shift + Productions[st_actionp[a].number].reduced -= 1 + st_action[a] = j + st_actionp[a] = p + if not rlevel: + log.info(' ! shift/reduce conflict for %s resolved as shift', a) + self.sr_conflicts.append((st, a, 'shift')) + elif (slevel == rlevel) and (rprec == 'nonassoc'): + st_action[a] = None + else: + # Hmmm. Guess we'll keep the reduce + if not slevel and not rlevel: + log.info(' ! shift/reduce conflict for %s resolved as reduce', a) + self.sr_conflicts.append((st, a, 'reduce')) + + else: + raise LALRError('Unknown conflict in state %d' % st) + else: + st_action[a] = j + st_actionp[a] = p + + # Print the actions associated with each terminal + _actprint = {} + for a, p, m in actlist: + if a in st_action: + if p is st_actionp[a]: + log.info(' %-15s %s', a, m) + _actprint[(a, m)] = 1 + log.info('') + # Print the actions that were not used. (debugging) + not_used = 0 + for a, p, m in actlist: + if a in st_action: + if p is not st_actionp[a]: + if not (a, m) in _actprint: + log.debug(' ! 
%-15s [ %s ]', a, m) + not_used = 1 + _actprint[(a, m)] = 1 + if not_used: + log.debug('') + + # Construct the goto table for this state + + nkeys = {} + for ii in I: + for s in ii.usyms: + if s in self.grammar.Nonterminals: + nkeys[s] = None + for n in nkeys: + g = self.lr0_goto(I, n) + j = self.lr0_cidhash.get(id(g), -1) + if j >= 0: + st_goto[n] = j + log.info(' %-30s shift and go to state %d', n, j) + + action[st] = st_action + actionp[st] = st_actionp + goto[st] = st_goto + st += 1 + + # ----------------------------------------------------------------------------- + # write() + # + # This function writes the LR parsing tables to a file + # ----------------------------------------------------------------------------- + + def write_table(self, tabmodule, outputdir='', signature=''): + if isinstance(tabmodule, types.ModuleType): + raise IOError("Won't overwrite existing tabmodule") + + basemodulename = tabmodule.split('.')[-1] + filename = os.path.join(outputdir, basemodulename) + '.py' + try: + f = open(filename, 'w') + + f.write(''' +# %s +# This file is automatically generated. Do not edit. +_tabversion = %r + +_lr_method = %r + +_lr_signature = %r + ''' % (os.path.basename(filename), __tabversion__, self.lr_method, signature)) + + # Change smaller to 0 to go back to original tables + smaller = 1 + + # Factor out names to try and make smaller + if smaller: + items = {} + + for s, nd in self.lr_action.items(): + for name, v in nd.items(): + i = items.get(name) + if not i: + i = ([], []) + items[name] = i + i[0].append(s) + i[1].append(v) + + f.write('\n_lr_action_items = {') + for k, v in items.items(): + f.write('%r:([' % k) + for i in v[0]: + f.write('%r,' % i) + f.write('],[') + for i in v[1]: + f.write('%r,' % i) + + f.write(']),') + f.write('}\n') + + f.write(''' +_lr_action = {} +for _k, _v in _lr_action_items.items(): + for _x,_y in zip(_v[0],_v[1]): + if not _x in _lr_action: _lr_action[_x] = {} + _lr_action[_x][_k] = _y +del _lr_action_items +''') + + else: + f.write('\n_lr_action = { ') + for k, v in self.lr_action.items(): + f.write('(%r,%r):%r,' % (k[0], k[1], v)) + f.write('}\n') + + if smaller: + # Factor out names to try and make smaller + items = {} + + for s, nd in self.lr_goto.items(): + for name, v in nd.items(): + i = items.get(name) + if not i: + i = ([], []) + items[name] = i + i[0].append(s) + i[1].append(v) + + f.write('\n_lr_goto_items = {') + for k, v in items.items(): + f.write('%r:([' % k) + for i in v[0]: + f.write('%r,' % i) + f.write('],[') + for i in v[1]: + f.write('%r,' % i) + + f.write(']),') + f.write('}\n') + + f.write(''' +_lr_goto = {} +for _k, _v in _lr_goto_items.items(): + for _x, _y in zip(_v[0], _v[1]): + if not _x in _lr_goto: _lr_goto[_x] = {} + _lr_goto[_x][_k] = _y +del _lr_goto_items +''') + else: + f.write('\n_lr_goto = { ') + for k, v in self.lr_goto.items(): + f.write('(%r,%r):%r,' % (k[0], k[1], v)) + f.write('}\n') + + # Write production table + f.write('_lr_productions = [\n') + for p in self.lr_productions: + if p.func: + f.write(' (%r,%r,%d,%r,%r,%d),\n' % (p.str, p.name, p.len, + p.func, os.path.basename(p.file), p.line)) + else: + f.write(' (%r,%r,%d,None,None,None),\n' % (str(p), p.name, p.len)) + f.write(']\n') + f.close() + + except IOError as e: + raise + + + # ----------------------------------------------------------------------------- + # pickle_table() + # + # This function pickles the LR parsing tables to a supplied file object + # ----------------------------------------------------------------------------- + + def 
pickle_table(self, filename, signature=''): + try: + import cPickle as pickle + except ImportError: + import pickle + with open(filename, 'wb') as outf: + pickle.dump(__tabversion__, outf, pickle_protocol) + pickle.dump(self.lr_method, outf, pickle_protocol) + pickle.dump(signature, outf, pickle_protocol) + pickle.dump(self.lr_action, outf, pickle_protocol) + pickle.dump(self.lr_goto, outf, pickle_protocol) + + outp = [] + for p in self.lr_productions: + if p.func: + outp.append((p.str, p.name, p.len, p.func, os.path.basename(p.file), p.line)) + else: + outp.append((str(p), p.name, p.len, None, None, None)) + pickle.dump(outp, outf, pickle_protocol) + +# ----------------------------------------------------------------------------- +# === INTROSPECTION === +# +# The following functions and classes are used to implement the PLY +# introspection features followed by the yacc() function itself. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# get_caller_module_dict() +# +# This function returns a dictionary containing all of the symbols defined within +# a caller further down the call stack. This is used to get the environment +# associated with the yacc() call if none was provided. +# ----------------------------------------------------------------------------- + +def get_caller_module_dict(levels): + f = sys._getframe(levels) + ldict = f.f_globals.copy() + if f.f_globals != f.f_locals: + ldict.update(f.f_locals) + return ldict + +# ----------------------------------------------------------------------------- +# parse_grammar() +# +# This takes a raw grammar rule string and parses it into production data +# ----------------------------------------------------------------------------- +def parse_grammar(doc, file, line): + grammar = [] + # Split the doc string into lines + pstrings = doc.splitlines() + lastp = None + dline = line + for ps in pstrings: + dline += 1 + p = ps.split() + if not p: + continue + try: + if p[0] == '|': + # This is a continuation of a previous rule + if not lastp: + raise SyntaxError("%s:%d: Misplaced '|'" % (file, dline)) + prodname = lastp + syms = p[1:] + else: + prodname = p[0] + lastp = prodname + syms = p[2:] + assign = p[1] + if assign != ':' and assign != '::=': + raise SyntaxError("%s:%d: Syntax error. Expected ':'" % (file, dline)) + + grammar.append((file, dline, prodname, syms)) + except SyntaxError: + raise + except Exception: + raise SyntaxError('%s:%d: Syntax error in rule %r' % (file, dline, ps.strip())) + + return grammar + +# ----------------------------------------------------------------------------- +# ParserReflect() +# +# This class represents information extracted for building a parser including +# start symbol, error function, tokens, precedence list, action functions, +# etc. 
+# ----------------------------------------------------------------------------- +class ParserReflect(object): + def __init__(self, pdict, log=None): + self.pdict = pdict + self.start = None + self.error_func = None + self.tokens = None + self.modules = set() + self.grammar = [] + self.error = False + + if log is None: + self.log = PlyLogger(sys.stderr) + else: + self.log = log + + # Get all of the basic information + def get_all(self): + self.get_start() + self.get_error_func() + self.get_tokens() + self.get_precedence() + self.get_pfunctions() + + # Validate all of the information + def validate_all(self): + self.validate_start() + self.validate_error_func() + self.validate_tokens() + self.validate_precedence() + self.validate_pfunctions() + self.validate_modules() + return self.error + + # Compute a signature over the grammar + def signature(self): + parts = [] + try: + if self.start: + parts.append(self.start) + if self.prec: + parts.append(''.join([''.join(p) for p in self.prec])) + if self.tokens: + parts.append(' '.join(self.tokens)) + for f in self.pfuncs: + if f[3]: + parts.append(f[3]) + except (TypeError, ValueError): + pass + return ''.join(parts) + + # ----------------------------------------------------------------------------- + # validate_modules() + # + # This method checks to see if there are duplicated p_rulename() functions + # in the parser module file. Without this function, it is really easy for + # users to make mistakes by cutting and pasting code fragments (and it's a real + # bugger to try and figure out why the resulting parser doesn't work). Therefore, + # we just do a little regular expression pattern matching of def statements + # to try and detect duplicates. + # ----------------------------------------------------------------------------- + + def validate_modules(self): + # Match def p_funcname( + fre = re.compile(r'\s*def\s+(p_[a-zA-Z_0-9]*)\(') + + for module in self.modules: + try: + lines, linen = inspect.getsourcelines(module) + except IOError: + continue + + counthash = {} + for linen, line in enumerate(lines): + linen += 1 + m = fre.match(line) + if m: + name = m.group(1) + prev = counthash.get(name) + if not prev: + counthash[name] = linen + else: + filename = inspect.getsourcefile(module) + self.log.warning('%s:%d: Function %s redefined. 
Previously defined on line %d', + filename, linen, name, prev) + + # Get the start symbol + def get_start(self): + self.start = self.pdict.get('start') + + # Validate the start symbol + def validate_start(self): + if self.start is not None: + if not isinstance(self.start, string_types): + self.log.error("'start' must be a string") + + # Look for error handler + def get_error_func(self): + self.error_func = self.pdict.get('p_error') + + # Validate the error function + def validate_error_func(self): + if self.error_func: + if isinstance(self.error_func, types.FunctionType): + ismethod = 0 + elif isinstance(self.error_func, types.MethodType): + ismethod = 1 + else: + self.log.error("'p_error' defined, but is not a function or method") + self.error = True + return + + eline = self.error_func.__code__.co_firstlineno + efile = self.error_func.__code__.co_filename + module = inspect.getmodule(self.error_func) + self.modules.add(module) + + argcount = self.error_func.__code__.co_argcount - ismethod + if argcount != 1: + self.log.error('%s:%d: p_error() requires 1 argument', efile, eline) + self.error = True + + # Get the tokens map + def get_tokens(self): + tokens = self.pdict.get('tokens') + if not tokens: + self.log.error('No token list is defined') + self.error = True + return + + if not isinstance(tokens, (list, tuple)): + self.log.error('tokens must be a list or tuple') + self.error = True + return + + if not tokens: + self.log.error('tokens is empty') + self.error = True + return + + self.tokens = tokens + + # Validate the tokens + def validate_tokens(self): + # Validate the tokens. + if 'error' in self.tokens: + self.log.error("Illegal token name 'error'. Is a reserved word") + self.error = True + return + + terminals = set() + for n in self.tokens: + if n in terminals: + self.log.warning('Token %r multiply defined', n) + terminals.add(n) + + # Get the precedence map (if any) + def get_precedence(self): + self.prec = self.pdict.get('precedence') + + # Validate and parse the precedence map + def validate_precedence(self): + preclist = [] + if self.prec: + if not isinstance(self.prec, (list, tuple)): + self.log.error('precedence must be a list or tuple') + self.error = True + return + for level, p in enumerate(self.prec): + if not isinstance(p, (list, tuple)): + self.log.error('Bad precedence table') + self.error = True + return + + if len(p) < 2: + self.log.error('Malformed precedence entry %s. 
Must be (assoc, term, ..., term)', p) + self.error = True + return + assoc = p[0] + if not isinstance(assoc, string_types): + self.log.error('precedence associativity must be a string') + self.error = True + return + for term in p[1:]: + if not isinstance(term, string_types): + self.log.error('precedence items must be strings') + self.error = True + return + preclist.append((term, assoc, level+1)) + self.preclist = preclist + + # Get all p_functions from the grammar + def get_pfunctions(self): + p_functions = [] + for name, item in self.pdict.items(): + if not name.startswith('p_') or name == 'p_error': + continue + if isinstance(item, (types.FunctionType, types.MethodType)): + line = getattr(item, 'co_firstlineno', item.__code__.co_firstlineno) + module = inspect.getmodule(item) + p_functions.append((line, module, name, item.__doc__)) + + # Sort all of the actions by line number; make sure to stringify + # modules to make them sortable, since `line` may not uniquely sort all + # p functions + p_functions.sort(key=lambda p_function: ( + p_function[0], + str(p_function[1]), + p_function[2], + p_function[3])) + self.pfuncs = p_functions + + # Validate all of the p_functions + def validate_pfunctions(self): + grammar = [] + # Check for non-empty symbols + if len(self.pfuncs) == 0: + self.log.error('no rules of the form p_rulename are defined') + self.error = True + return + + for line, module, name, doc in self.pfuncs: + file = inspect.getsourcefile(module) + func = self.pdict[name] + if isinstance(func, types.MethodType): + reqargs = 2 + else: + reqargs = 1 + if func.__code__.co_argcount > reqargs: + self.log.error('%s:%d: Rule %r has too many arguments', file, line, func.__name__) + self.error = True + elif func.__code__.co_argcount < reqargs: + self.log.error('%s:%d: Rule %r requires an argument', file, line, func.__name__) + self.error = True + elif not func.__doc__: + self.log.warning('%s:%d: No documentation string specified in function %r (ignored)', + file, line, func.__name__) + else: + try: + parsed_g = parse_grammar(doc, file, line) + for g in parsed_g: + grammar.append((name, g)) + except SyntaxError as e: + self.log.error(str(e)) + self.error = True + + # Looks like a valid grammar rule + # Mark the file in which defined. + self.modules.add(module) + + # Secondary validation step that looks for p_ definitions that are not functions + # or functions that look like they might be grammar rules. 
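+        # (Two heuristics below: a 'p_' name bound to something that is
+        # not a function draws the first warning; a plain function whose
+        # docstring's second space-separated token is ':', e.g.
+        # "statement : expression SEMI", is flagged as a possible grammar
+        # rule that is missing its 'p_' prefix.)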
+ + for n, v in self.pdict.items(): + if n.startswith('p_') and isinstance(v, (types.FunctionType, types.MethodType)): + continue + if n.startswith('t_'): + continue + if n.startswith('p_') and n != 'p_error': + self.log.warning('%r not defined as a function', n) + if ((isinstance(v, types.FunctionType) and v.__code__.co_argcount == 1) or + (isinstance(v, types.MethodType) and v.__func__.__code__.co_argcount == 2)): + if v.__doc__: + try: + doc = v.__doc__.split(' ') + if doc[1] == ':': + self.log.warning('%s:%d: Possible grammar rule %r defined without p_ prefix', + v.__code__.co_filename, v.__code__.co_firstlineno, n) + except IndexError: + pass + + self.grammar = grammar + +# ----------------------------------------------------------------------------- +# yacc(module) +# +# Build a parser +# ----------------------------------------------------------------------------- + +def yacc(method='LALR', debug=yaccdebug, module=None, tabmodule=tab_module, start=None, + check_recursion=True, optimize=False, write_tables=True, debugfile=debug_file, + outputdir=None, debuglog=None, errorlog=None, picklefile=None): + + if tabmodule is None: + tabmodule = tab_module + + # Reference to the parsing method of the last built parser + global parse + + # If pickling is enabled, table files are not created + if picklefile: + write_tables = 0 + + if errorlog is None: + errorlog = PlyLogger(sys.stderr) + + # Get the module dictionary used for the parser + if module: + _items = [(k, getattr(module, k)) for k in dir(module)] + pdict = dict(_items) + # If no __file__ attribute is available, try to obtain it from the __module__ instead + if '__file__' not in pdict: + pdict['__file__'] = sys.modules[pdict['__module__']].__file__ + else: + pdict = get_caller_module_dict(2) + + if outputdir is None: + # If no output directory is set, the location of the output files + # is determined according to the following rules: + # - If tabmodule specifies a package, files go into that package directory + # - Otherwise, files go in the same directory as the specifying module + if isinstance(tabmodule, types.ModuleType): + srcfile = tabmodule.__file__ + else: + if '.' not in tabmodule: + srcfile = pdict['__file__'] + else: + parts = tabmodule.split('.') + pkgname = '.'.join(parts[:-1]) + exec('import %s' % pkgname) + srcfile = getattr(sys.modules[pkgname], '__file__', '') + outputdir = os.path.dirname(srcfile) + + # Determine if the module is package of a package or not. + # If so, fix the tabmodule setting so that tables load correctly + pkg = pdict.get('__package__') + if pkg and isinstance(tabmodule, str): + if '.' not in tabmodule: + tabmodule = pkg + '.' 
+ tabmodule + + + + # Set start symbol if it's specified directly using an argument + if start is not None: + pdict['start'] = start + + # Collect parser information from the dictionary + pinfo = ParserReflect(pdict, log=errorlog) + pinfo.get_all() + + if pinfo.error: + raise YaccError('Unable to build parser') + + # Check signature against table files (if any) + signature = pinfo.signature() + + # Read the tables + try: + lr = LRTable() + if picklefile: + read_signature = lr.read_pickle(picklefile) + else: + read_signature = lr.read_table(tabmodule) + if optimize or (read_signature == signature): + try: + lr.bind_callables(pinfo.pdict) + parser = LRParser(lr, pinfo.error_func) + parse = parser.parse + return parser + except Exception as e: + errorlog.warning('There was a problem loading the table file: %r', e) + except VersionError as e: + errorlog.warning(str(e)) + except ImportError: + pass + + if debuglog is None: + if debug: + try: + debuglog = PlyLogger(open(os.path.join(outputdir, debugfile), 'w')) + except IOError as e: + errorlog.warning("Couldn't open %r. %s" % (debugfile, e)) + debuglog = NullLogger() + else: + debuglog = NullLogger() + + debuglog.info('Created by PLY version %s (http://www.dabeaz.com/ply)', __version__) + + errors = False + + # Validate the parser information + if pinfo.validate_all(): + raise YaccError('Unable to build parser') + + if not pinfo.error_func: + errorlog.warning('no p_error() function is defined') + + # Create a grammar object + grammar = Grammar(pinfo.tokens) + + # Set precedence level for terminals + for term, assoc, level in pinfo.preclist: + try: + grammar.set_precedence(term, assoc, level) + except GrammarError as e: + errorlog.warning('%s', e) + + # Add productions to the grammar + for funcname, gram in pinfo.grammar: + file, line, prodname, syms = gram + try: + grammar.add_production(prodname, syms, funcname, file, line) + except GrammarError as e: + errorlog.error('%s', e) + errors = True + + # Set the grammar start symbols + try: + if start is None: + grammar.set_start(pinfo.start) + else: + grammar.set_start(start) + except GrammarError as e: + errorlog.error(str(e)) + errors = True + + if errors: + raise YaccError('Unable to build parser') + + # Verify the grammar structure + undefined_symbols = grammar.undefined_symbols() + for sym, prod in undefined_symbols: + errorlog.error('%s:%d: Symbol %r used, but not defined as a token or a rule', prod.file, prod.line, sym) + errors = True + + unused_terminals = grammar.unused_terminals() + if unused_terminals: + debuglog.info('') + debuglog.info('Unused terminals:') + debuglog.info('') + for term in unused_terminals: + errorlog.warning('Token %r defined, but not used', term) + debuglog.info(' %s', term) + + # Print out all productions to the debug log + if debug: + debuglog.info('') + debuglog.info('Grammar') + debuglog.info('') + for n, p in enumerate(grammar.Productions): + debuglog.info('Rule %-5d %s', n, p) + + # Find unused non-terminals + unused_rules = grammar.unused_rules() + for prod in unused_rules: + errorlog.warning('%s:%d: Rule %r defined, but not used', prod.file, prod.line, prod.name) + + if len(unused_terminals) == 1: + errorlog.warning('There is 1 unused token') + if len(unused_terminals) > 1: + errorlog.warning('There are %d unused tokens', len(unused_terminals)) + + if len(unused_rules) == 1: + errorlog.warning('There is 1 unused rule') + if len(unused_rules) > 1: + errorlog.warning('There are %d unused rules', len(unused_rules)) + + if debug: + debuglog.info('') + 
debuglog.info('Terminals, with rules where they appear') + debuglog.info('') + terms = list(grammar.Terminals) + terms.sort() + for term in terms: + debuglog.info('%-20s : %s', term, ' '.join([str(s) for s in grammar.Terminals[term]])) + + debuglog.info('') + debuglog.info('Nonterminals, with rules where they appear') + debuglog.info('') + nonterms = list(grammar.Nonterminals) + nonterms.sort() + for nonterm in nonterms: + debuglog.info('%-20s : %s', nonterm, ' '.join([str(s) for s in grammar.Nonterminals[nonterm]])) + debuglog.info('') + + if check_recursion: + unreachable = grammar.find_unreachable() + for u in unreachable: + errorlog.warning('Symbol %r is unreachable', u) + + infinite = grammar.infinite_cycles() + for inf in infinite: + errorlog.error('Infinite recursion detected for symbol %r', inf) + errors = True + + unused_prec = grammar.unused_precedence() + for term, assoc in unused_prec: + errorlog.error('Precedence rule %r defined for unknown symbol %r', assoc, term) + errors = True + + if errors: + raise YaccError('Unable to build parser') + + # Run the LRGeneratedTable on the grammar + if debug: + errorlog.debug('Generating %s tables', method) + + lr = LRGeneratedTable(grammar, method, debuglog) + + if debug: + num_sr = len(lr.sr_conflicts) + + # Report shift/reduce and reduce/reduce conflicts + if num_sr == 1: + errorlog.warning('1 shift/reduce conflict') + elif num_sr > 1: + errorlog.warning('%d shift/reduce conflicts', num_sr) + + num_rr = len(lr.rr_conflicts) + if num_rr == 1: + errorlog.warning('1 reduce/reduce conflict') + elif num_rr > 1: + errorlog.warning('%d reduce/reduce conflicts', num_rr) + + # Write out conflicts to the output file + if debug and (lr.sr_conflicts or lr.rr_conflicts): + debuglog.warning('') + debuglog.warning('Conflicts:') + debuglog.warning('') + + for state, tok, resolution in lr.sr_conflicts: + debuglog.warning('shift/reduce conflict for %s in state %d resolved as %s', tok, state, resolution) + + already_reported = set() + for state, rule, rejected in lr.rr_conflicts: + if (state, id(rule), id(rejected)) in already_reported: + continue + debuglog.warning('reduce/reduce conflict in state %d resolved using rule (%s)', state, rule) + debuglog.warning('rejected rule (%s) in state %d', rejected, state) + errorlog.warning('reduce/reduce conflict in state %d resolved using rule (%s)', state, rule) + errorlog.warning('rejected rule (%s) in state %d', rejected, state) + already_reported.add((state, id(rule), id(rejected))) + + warned_never = [] + for state, rule, rejected in lr.rr_conflicts: + if not rejected.reduced and (rejected not in warned_never): + debuglog.warning('Rule (%s) is never reduced', rejected) + errorlog.warning('Rule (%s) is never reduced', rejected) + warned_never.append(rejected) + + # Write the table file if requested + if write_tables: + try: + lr.write_table(tabmodule, outputdir, signature) + except IOError as e: + errorlog.warning("Couldn't create %r. %s" % (tabmodule, e)) + + # Write a pickled version of the tables + if picklefile: + try: + lr.pickle_table(picklefile, signature) + except IOError as e: + errorlog.warning("Couldn't create %r. 
%s" % (picklefile, e)) + + # Build the parser + lr.bind_callables(pinfo.pdict) + parser = LRParser(lr, pinfo.error_func) + + parse = parser.parse + return parser diff --git a/venv/lib/python3.12/site-packages/pycparser/ply/ygen.py b/venv/lib/python3.12/site-packages/pycparser/ply/ygen.py new file mode 100644 index 00000000..acf5ca1a --- /dev/null +++ b/venv/lib/python3.12/site-packages/pycparser/ply/ygen.py @@ -0,0 +1,74 @@ +# ply: ygen.py +# +# This is a support program that auto-generates different versions of the YACC parsing +# function with different features removed for the purposes of performance. +# +# Users should edit the method LParser.parsedebug() in yacc.py. The source code +# for that method is then used to create the other methods. See the comments in +# yacc.py for further details. + +import os.path +import shutil + +def get_source_range(lines, tag): + srclines = enumerate(lines) + start_tag = '#--! %s-start' % tag + end_tag = '#--! %s-end' % tag + + for start_index, line in srclines: + if line.strip().startswith(start_tag): + break + + for end_index, line in srclines: + if line.strip().endswith(end_tag): + break + + return (start_index + 1, end_index) + +def filter_section(lines, tag): + filtered_lines = [] + include = True + tag_text = '#--! %s' % tag + for line in lines: + if line.strip().startswith(tag_text): + include = not include + elif include: + filtered_lines.append(line) + return filtered_lines + +def main(): + dirname = os.path.dirname(__file__) + shutil.copy2(os.path.join(dirname, 'yacc.py'), os.path.join(dirname, 'yacc.py.bak')) + with open(os.path.join(dirname, 'yacc.py'), 'r') as f: + lines = f.readlines() + + parse_start, parse_end = get_source_range(lines, 'parsedebug') + parseopt_start, parseopt_end = get_source_range(lines, 'parseopt') + parseopt_notrack_start, parseopt_notrack_end = get_source_range(lines, 'parseopt-notrack') + + # Get the original source + orig_lines = lines[parse_start:parse_end] + + # Filter the DEBUG sections out + parseopt_lines = filter_section(orig_lines, 'DEBUG') + + # Filter the TRACKING sections out + parseopt_notrack_lines = filter_section(parseopt_lines, 'TRACKING') + + # Replace the parser source sections with updated versions + lines[parseopt_notrack_start:parseopt_notrack_end] = parseopt_notrack_lines + lines[parseopt_start:parseopt_end] = parseopt_lines + + lines = [line.rstrip()+'\n' for line in lines] + with open(os.path.join(dirname, 'yacc.py'), 'w') as f: + f.writelines(lines) + + print('Updated yacc.py') + +if __name__ == '__main__': + main() + + + + + diff --git a/venv/lib/python3.12/site-packages/pycparser/plyparser.py b/venv/lib/python3.12/site-packages/pycparser/plyparser.py new file mode 100644 index 00000000..b8f4c439 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pycparser/plyparser.py @@ -0,0 +1,133 @@ +#----------------------------------------------------------------- +# plyparser.py +# +# PLYParser class and other utilities for simplifying programming +# parsers with PLY +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#----------------------------------------------------------------- + +import warnings + +class Coord(object): + """ Coordinates of a syntactic element. 
Consists of:
+            - File name
+            - Line number
+            - (optional) column number, for the Lexer
+    """
+    __slots__ = ('file', 'line', 'column', '__weakref__')
+    def __init__(self, file, line, column=None):
+        self.file = file
+        self.line = line
+        self.column = column
+
+    def __str__(self):
+        str = "%s:%s" % (self.file, self.line)
+        if self.column: str += ":%s" % self.column
+        return str
+
+
+class ParseError(Exception): pass
+
+
+class PLYParser(object):
+    def _create_opt_rule(self, rulename):
+        """ Given a rule name, creates an optional ply.yacc rule
+            for it. The name of the optional rule is
+            <rulename>_opt
+        """
+        optname = rulename + '_opt'
+
+        def optrule(self, p):
+            p[0] = p[1]
+
+        optrule.__doc__ = '%s : empty\n| %s' % (optname, rulename)
+        optrule.__name__ = 'p_%s' % optname
+        setattr(self.__class__, optrule.__name__, optrule)
+
+    def _coord(self, lineno, column=None):
+        return Coord(
+                file=self.clex.filename,
+                line=lineno,
+                column=column)
+
+    def _token_coord(self, p, token_idx):
+        """ Returns the coordinates for the YaccProduction object 'p' indexed
+            with 'token_idx'. The coordinate includes the 'lineno' and
+            'column'. Both follow the lex semantic, starting from 1.
+        """
+        last_cr = p.lexer.lexer.lexdata.rfind('\n', 0, p.lexpos(token_idx))
+        if last_cr < 0:
+            last_cr = -1
+        column = (p.lexpos(token_idx) - (last_cr))
+        return self._coord(p.lineno(token_idx), column)
+
+    def _parse_error(self, msg, coord):
+        raise ParseError("%s: %s" % (coord, msg))
+
+
+def parameterized(*params):
+    """ Decorator to create parameterized rules.
+
+        Parameterized rule methods must be named starting with 'p_' and contain
+        'xxx', and their docstrings may contain 'xxx' and 'yyy'. These will be
+        replaced by the given parameter tuples. For example, ``p_xxx_rule()`` with
+        docstring 'xxx_rule : yyy' when decorated with
+        ``@parameterized(('id', 'ID'))`` produces ``p_id_rule()`` with the docstring
+        'id_rule : ID'. Using multiple tuples produces multiple rules.
+    """
+    def decorate(rule_func):
+        rule_func._params = params
+        return rule_func
+    return decorate
+
+
+def template(cls):
+    """ Class decorator to generate rules from parameterized rule templates.
+
+        See `parameterized` for more information on parameterized rules.
+    """
+    issued_nodoc_warning = False
+    for attr_name in dir(cls):
+        if attr_name.startswith('p_'):
+            method = getattr(cls, attr_name)
+            if hasattr(method, '_params'):
+                # Remove the template method
+                delattr(cls, attr_name)
+                # Create parameterized rules from this method; only run this if
+                # the method has a docstring. This is to address an issue when
+                # pycparser's users are installed in -OO mode which strips
+                # docstrings away.
+                # See: https://github.com/eliben/pycparser/pull/198/ and
+                # https://github.com/eliben/pycparser/issues/197
+                # for discussion.
+                if method.__doc__ is not None:
+                    _create_param_rules(cls, method)
+                elif not issued_nodoc_warning:
+                    warnings.warn(
+                        'parsing methods must have __doc__ for pycparser to work properly',
+                        RuntimeWarning,
+                        stacklevel=2)
+                    issued_nodoc_warning = True
+    return cls
+
+
+def _create_param_rules(cls, func):
+    """ Create ply.yacc rules based on a parameterized rule function
+
+        Generates new methods (one per each pair of parameters) based on the
+        template rule function `func`, and attaches them to `cls`. The rule
+        function's parameters must be accessible via its `_params` attribute.
+ """ + for xxx, yyy in func._params: + # Use the template method's body for each new method + def param_rule(self, p): + func(self, p) + + # Substitute in the params for the grammar rule and function name + param_rule.__doc__ = func.__doc__.replace('xxx', xxx).replace('yyy', yyy) + param_rule.__name__ = func.__name__.replace('xxx', xxx) + + # Attach the new method to the class + setattr(cls, param_rule.__name__, param_rule) diff --git a/venv/lib/python3.12/site-packages/pycparser/yacctab.py b/venv/lib/python3.12/site-packages/pycparser/yacctab.py new file mode 100644 index 00000000..832561dc --- /dev/null +++ b/venv/lib/python3.12/site-packages/pycparser/yacctab.py @@ -0,0 +1,374 @@ + +# yacctab.py +# This file is automatically generated. Do not edit. +_tabversion = '3.10' + +_lr_method = 'LALR' + +_lr_signature = 'translation_unit_or_emptyleftLORleftLANDleftORleftXORleftANDleftEQNEleftGTGELTLEleftRSHIFTLSHIFTleftPLUSMINUSleftTIMESDIVIDEMODAUTO BREAK CASE CHAR CONST CONTINUE DEFAULT DO DOUBLE ELSE ENUM EXTERN FLOAT FOR GOTO IF INLINE INT LONG REGISTER OFFSETOF RESTRICT RETURN SHORT SIGNED SIZEOF STATIC STRUCT SWITCH TYPEDEF UNION UNSIGNED VOID VOLATILE WHILE __INT128 _BOOL _COMPLEX _NORETURN _THREAD_LOCAL _STATIC_ASSERT _ATOMIC _ALIGNOF _ALIGNAS _PRAGMA ID TYPEID INT_CONST_DEC INT_CONST_OCT INT_CONST_HEX INT_CONST_BIN INT_CONST_CHAR FLOAT_CONST HEX_FLOAT_CONST CHAR_CONST WCHAR_CONST U8CHAR_CONST U16CHAR_CONST U32CHAR_CONST STRING_LITERAL WSTRING_LITERAL U8STRING_LITERAL U16STRING_LITERAL U32STRING_LITERAL PLUS MINUS TIMES DIVIDE MOD OR AND NOT XOR LSHIFT RSHIFT LOR LAND LNOT LT LE GT GE EQ NE EQUALS TIMESEQUAL DIVEQUAL MODEQUAL PLUSEQUAL MINUSEQUAL LSHIFTEQUAL RSHIFTEQUAL ANDEQUAL XOREQUAL OREQUAL PLUSPLUS MINUSMINUS ARROW CONDOP LPAREN RPAREN LBRACKET RBRACKET LBRACE RBRACE COMMA PERIOD SEMI COLON ELLIPSIS PPHASH PPPRAGMA PPPRAGMASTRabstract_declarator_opt : empty\n| abstract_declaratorassignment_expression_opt : empty\n| assignment_expressionblock_item_list_opt : empty\n| block_item_listdeclaration_list_opt : empty\n| declaration_listdeclaration_specifiers_no_type_opt : empty\n| declaration_specifiers_no_typedesignation_opt : empty\n| designationexpression_opt : empty\n| expressionid_init_declarator_list_opt : empty\n| id_init_declarator_listidentifier_list_opt : empty\n| identifier_listinit_declarator_list_opt : empty\n| init_declarator_listinitializer_list_opt : empty\n| initializer_listparameter_type_list_opt : empty\n| parameter_type_liststruct_declarator_list_opt : empty\n| struct_declarator_listtype_qualifier_list_opt : empty\n| type_qualifier_list direct_id_declarator : ID\n direct_id_declarator : LPAREN id_declarator RPAREN\n direct_id_declarator : direct_id_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_id_declarator : direct_id_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_id_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_id_declarator : direct_id_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_id_declarator : direct_id_declarator LPAREN parameter_type_list RPAREN\n | direct_id_declarator LPAREN identifier_list_opt RPAREN\n direct_typeid_declarator : TYPEID\n direct_typeid_declarator : LPAREN typeid_declarator RPAREN\n direct_typeid_declarator : direct_typeid_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LBRACKET STATIC type_qualifier_list_opt 
assignment_expression RBRACKET\n | direct_typeid_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LPAREN parameter_type_list RPAREN\n | direct_typeid_declarator LPAREN identifier_list_opt RPAREN\n direct_typeid_noparen_declarator : TYPEID\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_typeid_noparen_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LPAREN parameter_type_list RPAREN\n | direct_typeid_noparen_declarator LPAREN identifier_list_opt RPAREN\n id_declarator : direct_id_declarator\n id_declarator : pointer direct_id_declarator\n typeid_declarator : direct_typeid_declarator\n typeid_declarator : pointer direct_typeid_declarator\n typeid_noparen_declarator : direct_typeid_noparen_declarator\n typeid_noparen_declarator : pointer direct_typeid_noparen_declarator\n translation_unit_or_empty : translation_unit\n | empty\n translation_unit : external_declaration\n translation_unit : translation_unit external_declaration\n external_declaration : function_definition\n external_declaration : declaration\n external_declaration : pp_directive\n | pppragma_directive\n external_declaration : SEMI\n external_declaration : static_assert\n static_assert : _STATIC_ASSERT LPAREN constant_expression COMMA unified_string_literal RPAREN\n | _STATIC_ASSERT LPAREN constant_expression RPAREN\n pp_directive : PPHASH\n pppragma_directive : PPPRAGMA\n | PPPRAGMA PPPRAGMASTR\n | _PRAGMA LPAREN unified_string_literal RPAREN\n pppragma_directive_list : pppragma_directive\n | pppragma_directive_list pppragma_directive\n function_definition : id_declarator declaration_list_opt compound_statement\n function_definition : declaration_specifiers id_declarator declaration_list_opt compound_statement\n statement : labeled_statement\n | expression_statement\n | compound_statement\n | selection_statement\n | iteration_statement\n | jump_statement\n | pppragma_directive\n | static_assert\n pragmacomp_or_statement : pppragma_directive_list statement\n | statement\n decl_body : declaration_specifiers init_declarator_list_opt\n | declaration_specifiers_no_type id_init_declarator_list_opt\n declaration : decl_body SEMI\n declaration_list : declaration\n | declaration_list declaration\n declaration_specifiers_no_type : type_qualifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : storage_class_specifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : function_specifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : atomic_specifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : alignment_specifier declaration_specifiers_no_type_opt\n declaration_specifiers : declaration_specifiers type_qualifier\n declaration_specifiers : declaration_specifiers storage_class_specifier\n declaration_specifiers : declaration_specifiers function_specifier\n declaration_specifiers : declaration_specifiers 
type_specifier_no_typeid\n declaration_specifiers : type_specifier\n declaration_specifiers : declaration_specifiers_no_type type_specifier\n declaration_specifiers : declaration_specifiers alignment_specifier\n storage_class_specifier : AUTO\n | REGISTER\n | STATIC\n | EXTERN\n | TYPEDEF\n | _THREAD_LOCAL\n function_specifier : INLINE\n | _NORETURN\n type_specifier_no_typeid : VOID\n | _BOOL\n | CHAR\n | SHORT\n | INT\n | LONG\n | FLOAT\n | DOUBLE\n | _COMPLEX\n | SIGNED\n | UNSIGNED\n | __INT128\n type_specifier : typedef_name\n | enum_specifier\n | struct_or_union_specifier\n | type_specifier_no_typeid\n | atomic_specifier\n atomic_specifier : _ATOMIC LPAREN type_name RPAREN\n type_qualifier : CONST\n | RESTRICT\n | VOLATILE\n | _ATOMIC\n init_declarator_list : init_declarator\n | init_declarator_list COMMA init_declarator\n init_declarator : declarator\n | declarator EQUALS initializer\n id_init_declarator_list : id_init_declarator\n | id_init_declarator_list COMMA init_declarator\n id_init_declarator : id_declarator\n | id_declarator EQUALS initializer\n specifier_qualifier_list : specifier_qualifier_list type_specifier_no_typeid\n specifier_qualifier_list : specifier_qualifier_list type_qualifier\n specifier_qualifier_list : type_specifier\n specifier_qualifier_list : type_qualifier_list type_specifier\n specifier_qualifier_list : alignment_specifier\n specifier_qualifier_list : specifier_qualifier_list alignment_specifier\n struct_or_union_specifier : struct_or_union ID\n | struct_or_union TYPEID\n struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close\n | struct_or_union brace_open brace_close\n struct_or_union_specifier : struct_or_union ID brace_open struct_declaration_list brace_close\n | struct_or_union ID brace_open brace_close\n | struct_or_union TYPEID brace_open struct_declaration_list brace_close\n | struct_or_union TYPEID brace_open brace_close\n struct_or_union : STRUCT\n | UNION\n struct_declaration_list : struct_declaration\n | struct_declaration_list struct_declaration\n struct_declaration : specifier_qualifier_list struct_declarator_list_opt SEMI\n struct_declaration : SEMI\n struct_declaration : pppragma_directive\n struct_declarator_list : struct_declarator\n | struct_declarator_list COMMA struct_declarator\n struct_declarator : declarator\n struct_declarator : declarator COLON constant_expression\n | COLON constant_expression\n enum_specifier : ENUM ID\n | ENUM TYPEID\n enum_specifier : ENUM brace_open enumerator_list brace_close\n enum_specifier : ENUM ID brace_open enumerator_list brace_close\n | ENUM TYPEID brace_open enumerator_list brace_close\n enumerator_list : enumerator\n | enumerator_list COMMA\n | enumerator_list COMMA enumerator\n alignment_specifier : _ALIGNAS LPAREN type_name RPAREN\n | _ALIGNAS LPAREN constant_expression RPAREN\n enumerator : ID\n | ID EQUALS constant_expression\n declarator : id_declarator\n | typeid_declarator\n pointer : TIMES type_qualifier_list_opt\n | TIMES type_qualifier_list_opt pointer\n type_qualifier_list : type_qualifier\n | type_qualifier_list type_qualifier\n parameter_type_list : parameter_list\n | parameter_list COMMA ELLIPSIS\n parameter_list : parameter_declaration\n | parameter_list COMMA parameter_declaration\n parameter_declaration : declaration_specifiers id_declarator\n | declaration_specifiers typeid_noparen_declarator\n parameter_declaration : declaration_specifiers abstract_declarator_opt\n identifier_list : identifier\n | identifier_list COMMA identifier\n initializer : 
assignment_expression\n initializer : brace_open initializer_list_opt brace_close\n | brace_open initializer_list COMMA brace_close\n initializer_list : designation_opt initializer\n | initializer_list COMMA designation_opt initializer\n designation : designator_list EQUALS\n designator_list : designator\n | designator_list designator\n designator : LBRACKET constant_expression RBRACKET\n | PERIOD identifier\n type_name : specifier_qualifier_list abstract_declarator_opt\n abstract_declarator : pointer\n abstract_declarator : pointer direct_abstract_declarator\n abstract_declarator : direct_abstract_declarator\n direct_abstract_declarator : LPAREN abstract_declarator RPAREN direct_abstract_declarator : direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET\n direct_abstract_declarator : LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_abstract_declarator : direct_abstract_declarator LBRACKET TIMES RBRACKET\n direct_abstract_declarator : LBRACKET TIMES RBRACKET\n direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN\n direct_abstract_declarator : LPAREN parameter_type_list_opt RPAREN\n direct_abstract_declarator : LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n block_item : declaration\n | statement\n block_item_list : block_item\n | block_item_list block_item\n compound_statement : brace_open block_item_list_opt brace_close labeled_statement : ID COLON pragmacomp_or_statement labeled_statement : CASE constant_expression COLON pragmacomp_or_statement labeled_statement : DEFAULT COLON pragmacomp_or_statement labeled_statement : ID COLON labeled_statement : CASE constant_expression COLON labeled_statement : DEFAULT COLON selection_statement : IF LPAREN expression RPAREN pragmacomp_or_statement selection_statement : IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement selection_statement : SWITCH LPAREN expression RPAREN pragmacomp_or_statement iteration_statement : WHILE LPAREN expression RPAREN pragmacomp_or_statement iteration_statement : DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement jump_statement : GOTO ID SEMI jump_statement : BREAK SEMI jump_statement : CONTINUE SEMI jump_statement : RETURN expression SEMI\n | RETURN SEMI\n expression_statement : expression_opt SEMI expression : assignment_expression\n | expression COMMA assignment_expression\n assignment_expression : LPAREN compound_statement RPAREN typedef_name : TYPEID assignment_expression : conditional_expression\n | unary_expression assignment_operator assignment_expression\n assignment_operator : EQUALS\n | XOREQUAL\n | TIMESEQUAL\n | DIVEQUAL\n | MODEQUAL\n | PLUSEQUAL\n | MINUSEQUAL\n | LSHIFTEQUAL\n | RSHIFTEQUAL\n | ANDEQUAL\n | OREQUAL\n constant_expression : conditional_expression conditional_expression : binary_expression\n | binary_expression CONDOP expression COLON conditional_expression\n binary_expression : cast_expression\n | binary_expression TIMES binary_expression\n | binary_expression DIVIDE binary_expression\n | binary_expression MOD binary_expression\n | binary_expression PLUS binary_expression\n | binary_expression MINUS binary_expression\n | binary_expression RSHIFT 
binary_expression\n | binary_expression LSHIFT binary_expression\n | binary_expression LT binary_expression\n | binary_expression LE binary_expression\n | binary_expression GE binary_expression\n | binary_expression GT binary_expression\n | binary_expression EQ binary_expression\n | binary_expression NE binary_expression\n | binary_expression AND binary_expression\n | binary_expression OR binary_expression\n | binary_expression XOR binary_expression\n | binary_expression LAND binary_expression\n | binary_expression LOR binary_expression\n cast_expression : unary_expression cast_expression : LPAREN type_name RPAREN cast_expression unary_expression : postfix_expression unary_expression : PLUSPLUS unary_expression\n | MINUSMINUS unary_expression\n | unary_operator cast_expression\n unary_expression : SIZEOF unary_expression\n | SIZEOF LPAREN type_name RPAREN\n | _ALIGNOF LPAREN type_name RPAREN\n unary_operator : AND\n | TIMES\n | PLUS\n | MINUS\n | NOT\n | LNOT\n postfix_expression : primary_expression postfix_expression : postfix_expression LBRACKET expression RBRACKET postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN\n | postfix_expression LPAREN RPAREN\n postfix_expression : postfix_expression PERIOD ID\n | postfix_expression PERIOD TYPEID\n | postfix_expression ARROW ID\n | postfix_expression ARROW TYPEID\n postfix_expression : postfix_expression PLUSPLUS\n | postfix_expression MINUSMINUS\n postfix_expression : LPAREN type_name RPAREN brace_open initializer_list brace_close\n | LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close\n primary_expression : identifier primary_expression : constant primary_expression : unified_string_literal\n | unified_wstring_literal\n primary_expression : LPAREN expression RPAREN primary_expression : OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN\n offsetof_member_designator : identifier\n | offsetof_member_designator PERIOD identifier\n | offsetof_member_designator LBRACKET expression RBRACKET\n argument_expression_list : assignment_expression\n | argument_expression_list COMMA assignment_expression\n identifier : ID constant : INT_CONST_DEC\n | INT_CONST_OCT\n | INT_CONST_HEX\n | INT_CONST_BIN\n | INT_CONST_CHAR\n constant : FLOAT_CONST\n | HEX_FLOAT_CONST\n constant : CHAR_CONST\n | WCHAR_CONST\n | U8CHAR_CONST\n | U16CHAR_CONST\n | U32CHAR_CONST\n unified_string_literal : STRING_LITERAL\n | unified_string_literal STRING_LITERAL\n unified_wstring_literal : WSTRING_LITERAL\n | U8STRING_LITERAL\n | U16STRING_LITERAL\n | U32STRING_LITERAL\n | unified_wstring_literal WSTRING_LITERAL\n | unified_wstring_literal U8STRING_LITERAL\n | unified_wstring_literal U16STRING_LITERAL\n | unified_wstring_literal U32STRING_LITERAL\n brace_open : LBRACE\n brace_close : RBRACE\n empty : ' + +_lr_action_items = 
{'$end':([0,1,2,3,4,5,6,7,8,9,10,14,15,64,90,91,127,208,251,262,267,355,501,],[-345,0,-58,-59,-60,-62,-63,-64,-65,-66,-67,-70,-71,-61,-90,-72,-76,-344,-77,-73,-69,-223,-68,]),'SEMI':([0,2,4,5,6,7,8,9,10,12,13,14,15,19,21,22,23,24,25,26,27,28,29,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,69,70,71,72,73,74,75,76,77,78,79,81,83,84,85,86,87,88,89,90,91,97,98,99,100,101,102,103,104,105,106,107,108,110,111,112,117,118,119,121,122,123,124,127,128,130,132,139,140,143,144,145,146,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,203,204,205,206,207,208,209,210,211,212,214,220,221,222,223,224,225,226,227,228,229,230,231,232,233,236,239,242,245,246,247,248,249,250,251,252,253,254,255,262,263,267,291,292,293,295,296,297,300,301,302,303,311,312,326,327,330,333,334,335,336,337,338,339,340,341,342,343,344,345,346,348,349,353,354,355,356,357,358,360,361,369,370,371,372,373,374,375,376,377,403,404,406,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,429,431,432,433,434,439,440,461,462,465,466,467,470,471,472,473,475,477,481,482,483,484,485,486,487,488,495,496,499,501,503,504,507,508,510,511,526,527,528,529,530,531,533,534,535,539,540,542,558,559,560,561,562,564,567,569,578,579,582,587,588,590,592,593,594,],[9,9,-60,-62,-63,-64,-65,-66,-67,-345,90,-70,-71,-52,-345,-345,-345,-128,-102,-345,-345,-29,-107,-125,-126,-127,-129,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,-345,-345,-129,-134,-181,-98,-99,-100,-101,-104,-88,-134,-19,-20,-135,-137,-182,-54,-37,-90,-72,-53,-93,-9,-10,-345,-94,-95,-103,-89,-129,-15,-16,-139,-141,-97,-96,-169,-170,-343,-149,-150,210,-76,-345,-181,-55,-333,-30,-311,-260,-261,-263,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,210,210,210,-152,-159,-344,-345,-162,-163,-145,-147,-13,-345,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,-320,361,-14,-345,374,375,377,-243,-247,-282,-77,-38,-136,-138,-196,-73,-334,-69,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-35,-36,-140,-142,-171,210,-154,210,-156,-151,-160,467,-143,-144,-148,-25,-26,-164,-166,-146,-130,-177,-178,-223,-222,-13,-227,-229,-242,-345,-87,-74,-345,485,-238,-239,486,-241,-43,-44,-313,-264,-265,-266,-267,-268,-269,-270,-271,-272,-273,-274,-275,-276,-277,-278,-279,-280,-281,-300,-301,-302,-303,-304,-31,-34,-172,-173,-153,-155,-161,-168,-224,-228,-226,-245,-244,-86,-75,533,-345,-237,-240,-248,-197,-39,-42,-283,-68,-298,-299,-289,-290,-32,-33,-165,-167,-225,-345,-345,-345,-345,565,-198,-40,-41,-262,-230,-87,-74,-232,-233,580,-307,-314,-345,588,-308,-231,-234,-345,-345,-236,-235,]),'PPHASH':([0,2,4,5,6,7,8,9,10,14,15,64,90,91,127,208,251,262,267,355,501,],[14,14,-60,-62,-63,-64,-65,-66,-67,-70,-71,-61,-90,-72,-76,-344,-77,-73,-69,-223,-68,]),'PPPRAGMA':([0,2,4,5,6,7,8,9,10,14,15,64,90,91,121,124,127,128,203,204,205,207,208,210,211,221,222,223,224,225,226,227,228,229,230,231,232,242,251,262,267,333,335,338,355,356,358,360,361,369,370,371,374,375,377,467,471,472,473,481,482,485,486,501,528,529,530,531,558,559,560,561,562,578,587,588,590,592,593,594,],[15,15,-60,-62,-63,-64,-65,-66,-67,-70,-71,-61,-90,-72,-343,15,-76,15,15,15,15,-159,-344,-162,-163,15,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,15,-77,-73,-69,15,15,-160,-223,-222,15,15,-242,15,-87,-74,-238,-239,-241,-161,-224,15,-226,-86,-75,-237,-240,-68,-225,15,15,15,-230,-87,-74,-232,-233,15,-231,-234,15,15,-236,-235,]),'_P
RAGMA':([0,2,4,5,6,7,8,9,10,14,15,64,90,91,121,124,127,128,203,204,205,207,208,210,211,221,222,223,224,225,226,227,228,229,230,231,232,242,251,262,267,333,335,338,355,356,358,360,361,369,370,371,374,375,377,467,471,472,473,481,482,485,486,501,528,529,530,531,558,559,560,561,562,578,587,588,590,592,593,594,],[16,16,-60,-62,-63,-64,-65,-66,-67,-70,-71,-61,-90,-72,-343,16,-76,16,16,16,16,-159,-344,-162,-163,16,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,16,-77,-73,-69,16,16,-160,-223,-222,16,16,-242,16,-87,-74,-238,-239,-241,-161,-224,16,-226,-86,-75,-237,-240,-68,-225,16,16,16,-230,-87,-74,-232,-233,16,-231,-234,16,16,-236,-235,]),'_STATIC_ASSERT':([0,2,4,5,6,7,8,9,10,14,15,64,90,91,121,127,128,208,221,222,223,224,225,226,227,228,229,230,231,232,242,251,262,267,355,356,358,360,361,369,370,371,374,375,377,471,472,473,481,482,485,486,501,528,529,530,531,558,559,560,561,562,578,587,588,590,592,593,594,],[18,18,-60,-62,-63,-64,-65,-66,-67,-70,-71,-61,-90,-72,-343,-76,18,-344,18,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,18,-77,-73,-69,-223,-222,18,18,-242,18,-87,-74,-238,-239,-241,-224,18,-226,-86,-75,-237,-240,-68,-225,18,18,18,-230,-87,-74,-232,-233,18,-231,-234,18,18,-236,-235,]),'ID':([0,2,4,5,6,7,8,9,10,12,14,15,17,20,21,22,23,24,25,26,27,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,62,63,64,69,70,71,72,74,75,76,77,78,80,81,82,90,91,94,95,96,98,99,100,101,102,103,104,106,112,113,114,115,116,117,118,119,120,121,122,123,126,127,128,134,135,136,137,141,147,148,149,150,153,154,155,156,160,161,182,183,184,192,194,195,196,197,198,199,206,208,209,212,214,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,244,247,251,256,257,258,259,262,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,294,298,306,309,310,314,318,322,323,330,331,332,334,336,337,340,341,342,347,348,349,353,354,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,398,400,401,402,405,448,449,452,455,457,458,459,461,462,465,466,468,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,505,509,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,570,571,578,580,587,588,590,592,593,594,],[28,28,-60,-62,-63,-64,-65,-66,-67,28,-70,-71,28,28,-345,-345,-345,-128,-102,28,-345,-107,-345,-125,-126,-127,-129,-246,118,122,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-157,-158,-61,28,28,-129,-134,-98,-99,-100,-101,-104,28,-134,28,-90,-72,159,-345,159,-93,-9,-10,-345,-94,-95,-103,-129,-97,-183,-27,-28,-185,-96,-169,-170,202,-343,-149,-150,159,-76,233,28,159,-345,159,159,-292,-293,-294,-291,159,159,159,159,-295,-296,159,-345,-28,28,28,159,-184,-186,202,202,-152,-344,28,-145,-147,233,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,159,159,233,373,159,-77,-345,159,-345,-28,-73,-69,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,431,433,159,159,-292,159,159,159,28,28,-345,-171,202,159,-154,-156,-151,-143,-144,-148,159,-146,-130,-177,-178,-223,-222,233,233,-242,159,159,159,159,233,-87,-74,159,-238,-239,-241,159,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,159,-12,159,159,-292,159,159,159,-345,159,28,159,-345,-28,159,-172,-173,-153,-155,28,159,-224,233,-226,159,-86,-75,159,-237,-240,-345,-201,-345,-68,159,159,159,159,-345,-28,159,159,-292,-225,233,233,233,159,159,159,-11,-292,159,159,-230,-87,-74,-232,-233
,159,-345,159,159,233,159,-231,-234,233,233,-236,-235,]),'LPAREN':([0,2,4,5,6,7,8,9,10,12,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,64,69,70,71,72,74,75,76,77,78,80,81,82,88,89,90,91,94,95,97,98,99,100,101,102,103,104,106,109,112,113,114,115,116,117,118,119,121,122,123,126,127,128,132,134,135,136,139,140,141,143,147,148,149,150,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,192,194,195,196,197,206,208,209,212,214,216,221,222,223,224,225,226,227,228,229,230,231,232,233,234,237,238,240,241,242,243,247,251,252,256,257,258,259,262,263,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,291,292,294,298,300,301,302,303,306,309,310,311,312,318,319,322,323,324,325,330,332,334,336,337,340,341,342,347,348,349,351,352,353,354,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,403,404,405,406,429,431,432,433,434,439,440,446,447,448,452,455,457,458,459,461,462,465,466,468,469,471,472,473,476,480,481,482,484,485,486,489,491,495,496,500,501,502,503,504,505,510,511,512,513,514,517,518,520,521,522,524,528,529,530,531,532,533,536,537,539,540,547,548,549,550,551,552,555,556,557,558,559,560,561,562,565,567,568,569,571,572,573,576,577,578,580,582,585,586,587,588,590,592,593,594,],[17,17,-60,-62,-63,-64,-65,-66,-67,82,-70,-71,92,17,94,96,17,-345,-345,-345,-128,-102,17,-345,-29,-107,-345,-125,-126,-127,-129,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,125,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,126,-61,82,17,-129,125,-98,-99,-100,-101,-104,82,-134,82,137,-37,-90,-72,141,-345,96,-93,-9,-10,-345,-94,-95,-103,-129,125,-97,-183,-27,-28,-185,-96,-169,-170,-343,-149,-150,141,-76,238,137,82,238,-345,-333,-30,238,-311,-292,-293,-294,-291,288,294,294,141,298,299,-297,-320,-295,-296,-309,-310,-312,304,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,238,-345,-28,322,82,238,-184,-186,-152,-344,82,-145,-147,351,238,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,-320,141,362,238,366,367,238,372,238,-77,-38,-345,238,-345,-28,-73,-334,-69,238,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,238,238,-305,-306,238,238,-339,-340,-341,-342,-292,238,238,-35,-36,322,449,322,-345,-45,460,-171,141,-154,-156,-151,-143,-144,-148,141,-146,-130,351,351,-177,-178,-223,-222,238,238,-242,238,238,238,238,238,-87,-74,238,-238,-239,-241,238,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,238,-12,141,-292,238,238,-43,-44,141,-313,-300,-301,-302,-303,-304,-31,-34,449,460,-345,322,238,-345,-28,238,-172,-173,-153,-155,82,141,-224,238,-226,141,532,-86,-75,238,-237,-240,-345,-201,-39,-42,-345,-68,141,-298,-299,238,-32,-33,238,-345,-28,-210,-216,-214,238,238,-292,-225,238,238,238,238,238,238,-11,-40,-41,-292,238,238,-50,-51,-212,-211,-213,-215,-230,-87,-74,-232,-233,238,-307,-345,-314,238,-46,-49,-217,-218,238,238,-308,-47,-48,-231,-234,238,238,-236,-235,]),'TIMES':([0,2,4,5,6,7,8,9,10,12,14,15,17,21,22,23,24,25,26,27,29,30,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,69,70,71,72,74,75,76,77,78,81,82,90,91,94,95,98,99,100,101,102,103,104,106,112,113,114,115,116,117,118,119,121,122,123,126,127,128,134,135,136,139,141,143,145,146,147,148,149,150,151,152,153,154,155,156,158,159,160,161,162,163,164,166,167,168,169,170,171
,172,173,174,175,176,177,178,179,180,181,182,183,184,192,194,195,197,206,208,209,212,214,216,221,222,223,224,225,226,227,228,229,230,231,232,233,234,238,242,247,250,251,256,257,258,259,262,263,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,291,292,293,294,295,296,297,298,300,301,302,303,306,309,310,322,323,330,332,334,336,337,340,341,342,347,348,349,351,353,354,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,406,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,429,431,432,433,434,448,455,457,458,459,461,462,465,466,468,469,471,472,473,476,481,482,484,485,486,489,491,499,500,501,502,503,504,505,507,508,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,567,568,569,571,578,580,582,587,588,590,592,593,594,],[30,30,-60,-62,-63,-64,-65,-66,-67,30,-70,-71,30,-345,-345,-345,-128,-102,30,-345,-107,-345,-125,-126,-127,-129,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,30,30,-129,-134,-98,-99,-100,-101,-104,-134,30,-90,-72,147,-345,-93,-9,-10,-345,-94,-95,-103,-129,-97,30,-27,-28,-185,-96,-169,-170,-343,-149,-150,147,-76,147,30,147,-345,-333,147,-311,269,-263,-292,-293,-294,-291,-282,-284,147,147,147,147,-297,-320,-295,-296,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,306,-345,-28,30,30,147,-186,-152,-344,30,-145,-147,30,147,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,-320,147,147,147,147,-282,-77,-345,400,-345,-28,-73,-334,-69,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,-305,-306,-285,147,-286,-287,-288,147,-339,-340,-341,-342,-292,147,147,30,456,-171,147,-154,-156,-151,-143,-144,-148,147,-146,-130,30,-177,-178,-223,-222,147,147,-242,147,147,147,147,147,-87,-74,147,-238,-239,-241,147,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,147,-12,147,-292,147,147,147,-313,-264,-265,-266,269,269,269,269,269,269,269,269,269,269,269,269,269,269,269,-300,-301,-302,-303,-304,-345,147,-345,-28,524,-172,-173,-153,-155,30,147,-224,147,-226,147,-86,-75,147,-237,-240,-345,-201,-283,-345,-68,147,-298,-299,147,-289,-290,547,-345,-28,147,147,-292,-225,147,147,147,147,147,147,-11,-292,147,147,-230,-87,-74,-232,-233,147,-307,-345,-314,147,147,147,-308,-231,-234,147,147,-236,-235,]),'TYPEID':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,62,63,64,67,68,69,70,71,72,73,74,75,76,77,78,80,81,82,90,91,96,97,98,99,100,101,102,103,104,106,112,113,114,115,116,117,118,119,121,122,123,124,125,126,127,128,129,134,137,140,141,192,193,194,196,197,203,204,205,206,207,208,209,210,211,212,213,214,221,222,223,224,225,226,227,228,229,230,231,232,238,251,262,267,289,290,294,298,299,304,311,312,313,318,322,330,333,334,335,336,337,338,340,341,342,348,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,461,462,465,466,467,468,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[35,35,-60,-62,-63,-64,-65,-66,-67,35,89,-70,-71,-52,-345,-345,-345,-128,-102,35,-345,-29,-107,-345,-125,-126,-127,-129,-246,119,123,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-157,-158,-61,35,-91,89,35,-129,-134,35,-98,-99,-100,-101,-104,89,-134,89,-90,-72,35,-53,-
93,-9,-10,-345,-94,-95,-103,-129,-97,-183,-27,-28,-185,-96,-169,-170,-343,-149,-150,35,35,35,-76,35,-92,89,35,-30,35,324,35,89,-184,-186,35,35,35,-152,-159,-344,89,-162,-163,-145,35,-147,35,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,35,-77,-73,-69,432,434,35,35,35,35,-35,-36,35,324,35,-171,35,-154,35,-156,-151,-160,-143,-144,-148,-146,-130,35,-177,-178,-223,-222,-227,-229,-242,-87,-84,35,-238,-239,-241,-31,-34,35,35,-172,-173,-153,-155,-161,89,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'ENUM':([0,2,4,5,6,7,8,9,10,11,14,15,19,21,22,23,26,27,28,29,34,50,51,52,53,54,55,56,57,58,59,60,64,67,68,70,71,72,73,90,91,96,97,98,99,100,101,102,103,112,116,117,121,124,125,126,127,128,129,137,140,141,193,197,203,204,205,207,208,210,211,213,221,222,223,224,225,226,227,228,229,230,231,232,238,251,262,267,294,298,299,304,311,312,313,322,333,335,338,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,467,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[36,36,-60,-62,-63,-64,-65,-66,-67,36,-70,-71,-52,-345,-345,-345,36,-345,-29,-107,-345,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,36,-91,36,-345,-134,36,-90,-72,36,-53,-93,-9,-10,-345,-94,-95,-97,-185,-96,-343,36,36,36,-76,36,-92,36,-30,36,36,-186,36,36,36,-159,-344,-162,-163,36,36,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,36,-77,-73,-69,36,36,36,36,-35,-36,36,36,36,36,-160,-130,36,-177,-178,-223,-222,-227,-229,-242,-87,-84,36,-238,-239,-241,-31,-34,36,36,-161,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'VOID':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,26,27,28,29,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,70,71,72,73,74,75,76,77,78,81,90,91,96,97,98,99,100,101,102,103,104,106,112,116,117,118,119,121,122,123,124,125,126,127,128,129,137,140,141,192,193,197,203,204,205,206,207,208,209,210,211,212,213,214,216,221,222,223,224,225,226,227,228,229,230,231,232,238,251,262,267,294,298,299,304,311,312,313,322,330,333,334,335,336,337,338,340,341,342,348,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,461,462,465,466,467,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[38,38,-60,-62,-63,-64,-65,-66,-67,38,38,-70,-71,-52,-345,-345,-345,-128,-102,38,-345,-29,-107,-125,-126,-127,-129,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,38,-91,38,38,-129,-134,38,-98,-99,-100,-101,-104,-134,-90,-72,38,-53,-93,-9,-10,-345,-94,-95,-103,-129,-97,-185,-96,-169,-170,-343,-149,-150,38,38,38,-76,38,-92,38,-30,38,38,38,-186,38,38,38,-152,-159,-344,38,-162,-163,-145,38,-147,38,38,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,38,-77,-73,-69,38,38,38,38,-35,-36,38,38,-171,38,-154,38,-156,-151,-160,-143,-144,-148,-146,-130,38,-177,-178,-223,-222,-227,-229,-242,-87,-84,38,-238,-239,-241,-31,-34,38,38,-172,-173,-153,-155,-161,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'_BOOL':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,26,27,28,29,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,70,71,72,73,74,75,76,77,78,81,90,91,96,97,98,99,100,101,102,103,104,106,112,116,117,118,119,121,122,123,124,125,126,127,128,129,137,140,141,192,193,197,203,204,205,206,207,208,209,210,211,212,213,214,216,221,222,223,224,225,226,227,228,
229,230,231,232,238,251,262,267,294,298,299,304,311,312,313,322,330,333,334,335,336,337,338,340,341,342,348,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,461,462,465,466,467,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[39,39,-60,-62,-63,-64,-65,-66,-67,39,39,-70,-71,-52,-345,-345,-345,-128,-102,39,-345,-29,-107,-125,-126,-127,-129,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,39,-91,39,39,-129,-134,39,-98,-99,-100,-101,-104,-134,-90,-72,39,-53,-93,-9,-10,-345,-94,-95,-103,-129,-97,-185,-96,-169,-170,-343,-149,-150,39,39,39,-76,39,-92,39,-30,39,39,39,-186,39,39,39,-152,-159,-344,39,-162,-163,-145,39,-147,39,39,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,39,-77,-73,-69,39,39,39,39,-35,-36,39,39,-171,39,-154,39,-156,-151,-160,-143,-144,-148,-146,-130,39,-177,-178,-223,-222,-227,-229,-242,-87,-84,39,-238,-239,-241,-31,-34,39,39,-172,-173,-153,-155,-161,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'CHAR':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,26,27,28,29,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,70,71,72,73,74,75,76,77,78,81,90,91,96,97,98,99,100,101,102,103,104,106,112,116,117,118,119,121,122,123,124,125,126,127,128,129,137,140,141,192,193,197,203,204,205,206,207,208,209,210,211,212,213,214,216,221,222,223,224,225,226,227,228,229,230,231,232,238,251,262,267,294,298,299,304,311,312,313,322,330,333,334,335,336,337,338,340,341,342,348,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,461,462,465,466,467,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[40,40,-60,-62,-63,-64,-65,-66,-67,40,40,-70,-71,-52,-345,-345,-345,-128,-102,40,-345,-29,-107,-125,-126,-127,-129,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,40,-91,40,40,-129,-134,40,-98,-99,-100,-101,-104,-134,-90,-72,40,-53,-93,-9,-10,-345,-94,-95,-103,-129,-97,-185,-96,-169,-170,-343,-149,-150,40,40,40,-76,40,-92,40,-30,40,40,40,-186,40,40,40,-152,-159,-344,40,-162,-163,-145,40,-147,40,40,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,40,-77,-73,-69,40,40,40,40,-35,-36,40,40,-171,40,-154,40,-156,-151,-160,-143,-144,-148,-146,-130,40,-177,-178,-223,-222,-227,-229,-242,-87,-84,40,-238,-239,-241,-31,-34,40,40,-172,-173,-153,-155,-161,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'SHORT':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,26,27,28,29,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,70,71,72,73,74,75,76,77,78,81,90,91,96,97,98,99,100,101,102,103,104,106,112,116,117,118,119,121,122,123,124,125,126,127,128,129,137,140,141,192,193,197,203,204,205,206,207,208,209,210,211,212,213,214,216,221,222,223,224,225,226,227,228,229,230,231,232,238,251,262,267,294,298,299,304,311,312,313,322,330,333,334,335,336,337,338,340,341,342,348,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,461,462,465,466,467,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[41,41,-60,-62,-63,-64,-65,-66,-67,41,41,-70,-71,-52,-345,-345,-345,-128,-102,41,-345,-29,-107,-125,-126,-127,-129,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,41,-91,41,41,
-129,-134,41,-98,-99,-100,-101,-104,-134,-90,-72,41,-53,-93,-9,-10,-345,-94,-95,-103,-129,-97,-185,-96,-169,-170,-343,-149,-150,41,41,41,-76,41,-92,41,-30,41,41,41,-186,41,41,41,-152,-159,-344,41,-162,-163,-145,41,-147,41,41,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,41,-77,-73,-69,41,41,41,41,-35,-36,41,41,-171,41,-154,41,-156,-151,-160,-143,-144,-148,-146,-130,41,-177,-178,-223,-222,-227,-229,-242,-87,-84,41,-238,-239,-241,-31,-34,41,41,-172,-173,-153,-155,-161,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'INT':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,26,27,28,29,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,70,71,72,73,74,75,76,77,78,81,90,91,96,97,98,99,100,101,102,103,104,106,112,116,117,118,119,121,122,123,124,125,126,127,128,129,137,140,141,192,193,197,203,204,205,206,207,208,209,210,211,212,213,214,216,221,222,223,224,225,226,227,228,229,230,231,232,238,251,262,267,294,298,299,304,311,312,313,322,330,333,334,335,336,337,338,340,341,342,348,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,461,462,465,466,467,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[42,42,-60,-62,-63,-64,-65,-66,-67,42,42,-70,-71,-52,-345,-345,-345,-128,-102,42,-345,-29,-107,-125,-126,-127,-129,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,42,-91,42,42,-129,-134,42,-98,-99,-100,-101,-104,-134,-90,-72,42,-53,-93,-9,-10,-345,-94,-95,-103,-129,-97,-185,-96,-169,-170,-343,-149,-150,42,42,42,-76,42,-92,42,-30,42,42,42,-186,42,42,42,-152,-159,-344,42,-162,-163,-145,42,-147,42,42,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,42,-77,-73,-69,42,42,42,42,-35,-36,42,42,-171,42,-154,42,-156,-151,-160,-143,-144,-148,-146,-130,42,-177,-178,-223,-222,-227,-229,-242,-87,-84,42,-238,-239,-241,-31,-34,42,42,-172,-173,-153,-155,-161,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'LONG':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,26,27,28,29,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,70,71,72,73,74,75,76,77,78,81,90,91,96,97,98,99,100,101,102,103,104,106,112,116,117,118,119,121,122,123,124,125,126,127,128,129,137,140,141,192,193,197,203,204,205,206,207,208,209,210,211,212,213,214,216,221,222,223,224,225,226,227,228,229,230,231,232,238,251,262,267,294,298,299,304,311,312,313,322,330,333,334,335,336,337,338,340,341,342,348,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,461,462,465,466,467,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[43,43,-60,-62,-63,-64,-65,-66,-67,43,43,-70,-71,-52,-345,-345,-345,-128,-102,43,-345,-29,-107,-125,-126,-127,-129,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,43,-91,43,43,-129,-134,43,-98,-99,-100,-101,-104,-134,-90,-72,43,-53,-93,-9,-10,-345,-94,-95,-103,-129,-97,-185,-96,-169,-170,-343,-149,-150,43,43,43,-76,43,-92,43,-30,43,43,43,-186,43,43,43,-152,-159,-344,43,-162,-163,-145,43,-147,43,43,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,43,-77,-73,-69,43,43,43,43,-35,-36,43,43,-171,43,-154,43,-156,-151,-160,-143,-144,-148,-146,-130,43,-177,-178,-223,-222,-227,-229,-242,-87,-84,43,-238,-239,-241,-31,-34,43,43,-172,-173,-153,-155,-161,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,
-233,-231,-234,-236,-235,]),'FLOAT':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,26,27,28,29,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,70,71,72,73,74,75,76,77,78,81,90,91,96,97,98,99,100,101,102,103,104,106,112,116,117,118,119,121,122,123,124,125,126,127,128,129,137,140,141,192,193,197,203,204,205,206,207,208,209,210,211,212,213,214,216,221,222,223,224,225,226,227,228,229,230,231,232,238,251,262,267,294,298,299,304,311,312,313,322,330,333,334,335,336,337,338,340,341,342,348,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,461,462,465,466,467,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[44,44,-60,-62,-63,-64,-65,-66,-67,44,44,-70,-71,-52,-345,-345,-345,-128,-102,44,-345,-29,-107,-125,-126,-127,-129,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,44,-91,44,44,-129,-134,44,-98,-99,-100,-101,-104,-134,-90,-72,44,-53,-93,-9,-10,-345,-94,-95,-103,-129,-97,-185,-96,-169,-170,-343,-149,-150,44,44,44,-76,44,-92,44,-30,44,44,44,-186,44,44,44,-152,-159,-344,44,-162,-163,-145,44,-147,44,44,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,44,-77,-73,-69,44,44,44,44,-35,-36,44,44,-171,44,-154,44,-156,-151,-160,-143,-144,-148,-146,-130,44,-177,-178,-223,-222,-227,-229,-242,-87,-84,44,-238,-239,-241,-31,-34,44,44,-172,-173,-153,-155,-161,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'DOUBLE':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,26,27,28,29,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,70,71,72,73,74,75,76,77,78,81,90,91,96,97,98,99,100,101,102,103,104,106,112,116,117,118,119,121,122,123,124,125,126,127,128,129,137,140,141,192,193,197,203,204,205,206,207,208,209,210,211,212,213,214,216,221,222,223,224,225,226,227,228,229,230,231,232,238,251,262,267,294,298,299,304,311,312,313,322,330,333,334,335,336,337,338,340,341,342,348,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,461,462,465,466,467,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[45,45,-60,-62,-63,-64,-65,-66,-67,45,45,-70,-71,-52,-345,-345,-345,-128,-102,45,-345,-29,-107,-125,-126,-127,-129,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,45,-91,45,45,-129,-134,45,-98,-99,-100,-101,-104,-134,-90,-72,45,-53,-93,-9,-10,-345,-94,-95,-103,-129,-97,-185,-96,-169,-170,-343,-149,-150,45,45,45,-76,45,-92,45,-30,45,45,45,-186,45,45,45,-152,-159,-344,45,-162,-163,-145,45,-147,45,45,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,45,-77,-73,-69,45,45,45,45,-35,-36,45,45,-171,45,-154,45,-156,-151,-160,-143,-144,-148,-146,-130,45,-177,-178,-223,-222,-227,-229,-242,-87,-84,45,-238,-239,-241,-31,-34,45,45,-172,-173,-153,-155,-161,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'_COMPLEX':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,26,27,28,29,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,70,71,72,73,74,75,76,77,78,81,90,91,96,97,98,99,100,101,102,103,104,106,112,116,117,118,119,121,122,123,124,125,126,127,128,129,137,140,141,192,193,197,203,204,205,206,207,208,209,210,211,212,213,214,216,221,222,223,224,225,226,227,228,229,230,231,232,238,251,262,267,294,298,299,304,311,312,313,322,330,333,334,335,336,337,338,340,341,34
2,348,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,461,462,465,466,467,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[46,46,-60,-62,-63,-64,-65,-66,-67,46,46,-70,-71,-52,-345,-345,-345,-128,-102,46,-345,-29,-107,-125,-126,-127,-129,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,46,-91,46,46,-129,-134,46,-98,-99,-100,-101,-104,-134,-90,-72,46,-53,-93,-9,-10,-345,-94,-95,-103,-129,-97,-185,-96,-169,-170,-343,-149,-150,46,46,46,-76,46,-92,46,-30,46,46,46,-186,46,46,46,-152,-159,-344,46,-162,-163,-145,46,-147,46,46,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,46,-77,-73,-69,46,46,46,46,-35,-36,46,46,-171,46,-154,46,-156,-151,-160,-143,-144,-148,-146,-130,46,-177,-178,-223,-222,-227,-229,-242,-87,-84,46,-238,-239,-241,-31,-34,46,46,-172,-173,-153,-155,-161,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'SIGNED':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,26,27,28,29,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,70,71,72,73,74,75,76,77,78,81,90,91,96,97,98,99,100,101,102,103,104,106,112,116,117,118,119,121,122,123,124,125,126,127,128,129,137,140,141,192,193,197,203,204,205,206,207,208,209,210,211,212,213,214,216,221,222,223,224,225,226,227,228,229,230,231,232,238,251,262,267,294,298,299,304,311,312,313,322,330,333,334,335,336,337,338,340,341,342,348,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,461,462,465,466,467,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[47,47,-60,-62,-63,-64,-65,-66,-67,47,47,-70,-71,-52,-345,-345,-345,-128,-102,47,-345,-29,-107,-125,-126,-127,-129,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,47,-91,47,47,-129,-134,47,-98,-99,-100,-101,-104,-134,-90,-72,47,-53,-93,-9,-10,-345,-94,-95,-103,-129,-97,-185,-96,-169,-170,-343,-149,-150,47,47,47,-76,47,-92,47,-30,47,47,47,-186,47,47,47,-152,-159,-344,47,-162,-163,-145,47,-147,47,47,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,47,-77,-73,-69,47,47,47,47,-35,-36,47,47,-171,47,-154,47,-156,-151,-160,-143,-144,-148,-146,-130,47,-177,-178,-223,-222,-227,-229,-242,-87,-84,47,-238,-239,-241,-31,-34,47,47,-172,-173,-153,-155,-161,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'UNSIGNED':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,26,27,28,29,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,70,71,72,73,74,75,76,77,78,81,90,91,96,97,98,99,100,101,102,103,104,106,112,116,117,118,119,121,122,123,124,125,126,127,128,129,137,140,141,192,193,197,203,204,205,206,207,208,209,210,211,212,213,214,216,221,222,223,224,225,226,227,228,229,230,231,232,238,251,262,267,294,298,299,304,311,312,313,322,330,333,334,335,336,337,338,340,341,342,348,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,461,462,465,466,467,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[48,48,-60,-62,-63,-64,-65,-66,-67,48,48,-70,-71,-52,-345,-345,-345,-128,-102,48,-345,-29,-107,-125,-126,-127,-129,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,48,-91,48,48,-129,-134,48,-98,-99,-100,-101,-104,-134,-90,-72,48,-53,-93,-9,-10,-345,-94,-95,-103,-129,-97,-18
5,-96,-169,-170,-343,-149,-150,48,48,48,-76,48,-92,48,-30,48,48,48,-186,48,48,48,-152,-159,-344,48,-162,-163,-145,48,-147,48,48,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,48,-77,-73,-69,48,48,48,48,-35,-36,48,48,-171,48,-154,48,-156,-151,-160,-143,-144,-148,-146,-130,48,-177,-178,-223,-222,-227,-229,-242,-87,-84,48,-238,-239,-241,-31,-34,48,48,-172,-173,-153,-155,-161,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'__INT128':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,26,27,28,29,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,70,71,72,73,74,75,76,77,78,81,90,91,96,97,98,99,100,101,102,103,104,106,112,116,117,118,119,121,122,123,124,125,126,127,128,129,137,140,141,192,193,197,203,204,205,206,207,208,209,210,211,212,213,214,216,221,222,223,224,225,226,227,228,229,230,231,232,238,251,262,267,294,298,299,304,311,312,313,322,330,333,334,335,336,337,338,340,341,342,348,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,461,462,465,466,467,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[49,49,-60,-62,-63,-64,-65,-66,-67,49,49,-70,-71,-52,-345,-345,-345,-128,-102,49,-345,-29,-107,-125,-126,-127,-129,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,49,-91,49,49,-129,-134,49,-98,-99,-100,-101,-104,-134,-90,-72,49,-53,-93,-9,-10,-345,-94,-95,-103,-129,-97,-185,-96,-169,-170,-343,-149,-150,49,49,49,-76,49,-92,49,-30,49,49,49,-186,49,49,49,-152,-159,-344,49,-162,-163,-145,49,-147,49,49,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,49,-77,-73,-69,49,49,49,49,-35,-36,49,49,-171,49,-154,49,-156,-151,-160,-143,-144,-148,-146,-130,49,-177,-178,-223,-222,-227,-229,-242,-87,-84,49,-238,-239,-241,-31,-34,49,49,-172,-173,-153,-155,-161,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'_ATOMIC':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,70,71,72,73,74,75,76,77,78,81,90,91,95,96,97,98,99,100,101,102,103,104,106,112,115,116,117,118,119,121,122,123,124,125,126,127,128,129,136,137,140,141,183,184,192,193,197,203,204,205,206,207,208,209,210,211,212,213,214,216,221,222,223,224,225,226,227,228,229,230,231,232,238,251,258,259,262,267,294,298,299,304,311,312,313,322,323,330,333,334,335,336,337,338,340,341,342,348,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,448,449,457,458,460,461,462,465,466,467,471,472,473,481,482,485,486,501,510,511,513,514,528,558,559,560,561,562,587,588,593,594,],[50,50,-60,-62,-63,-64,-65,-66,-67,72,81,-70,-71,-52,72,72,72,-128,-102,109,72,-29,-107,81,-125,-126,-127,72,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,72,-91,81,109,72,-134,72,-98,-99,-100,-101,-104,-134,-90,-72,81,50,-53,-93,-9,-10,72,-94,-95,-103,-129,-97,81,-185,-96,-169,-170,-343,-149,-150,50,50,50,-76,72,-92,81,50,-30,50,81,81,81,109,-186,50,50,50,-152,-159,-344,81,-162,-163,-145,72,-147,81,72,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,50,-77,81,81,-73,-69,50,50,50,50,-35,-36,50,50,81,-171,50,-154,50,-156,-151,-160,-143,-144,-148,-146,-130,50,-177,-178,-223,-222,-227,-229,-242,-87,-84,72,-238,-239,-241,-31,-34,81,50,81,81,50,-172,-173,-153,-155,-161,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,81,81,-225,-230,-87,-84,-232,-233
,-231,-234,-236,-235,]),'CONST':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,27,28,29,30,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,71,72,73,74,75,76,77,78,81,90,91,95,96,97,101,104,106,115,116,118,119,121,122,123,124,125,126,127,128,129,136,137,140,141,183,184,192,197,203,204,205,206,207,208,209,210,211,212,213,214,216,221,222,223,224,225,226,227,228,229,230,231,232,238,251,258,259,262,267,294,298,299,304,311,312,313,322,323,330,333,334,335,336,337,338,340,341,342,348,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,448,449,457,458,460,461,462,465,466,467,471,472,473,481,482,485,486,501,510,511,513,514,528,558,559,560,561,562,587,588,593,594,],[51,51,-60,-62,-63,-64,-65,-66,-67,51,51,-70,-71,-52,51,51,51,-128,-102,51,-29,-107,51,-125,-126,-127,51,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,51,-91,51,51,-134,51,-98,-99,-100,-101,-104,-134,-90,-72,51,51,-53,51,-103,-129,51,-185,-169,-170,-343,-149,-150,51,51,51,-76,51,-92,51,51,-30,51,51,51,51,-186,51,51,51,-152,-159,-344,51,-162,-163,-145,51,-147,51,51,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,51,-77,51,51,-73,-69,51,51,51,51,-35,-36,51,51,51,-171,51,-154,51,-156,-151,-160,-143,-144,-148,-146,-130,51,-177,-178,-223,-222,-227,-229,-242,-87,-84,51,-238,-239,-241,-31,-34,51,51,51,51,51,-172,-173,-153,-155,-161,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,51,51,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'RESTRICT':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,27,28,29,30,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,71,72,73,74,75,76,77,78,81,90,91,95,96,97,101,104,106,115,116,118,119,121,122,123,124,125,126,127,128,129,136,137,140,141,183,184,192,197,203,204,205,206,207,208,209,210,211,212,213,214,216,221,222,223,224,225,226,227,228,229,230,231,232,238,251,258,259,262,267,294,298,299,304,311,312,313,322,323,330,333,334,335,336,337,338,340,341,342,348,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,448,449,457,458,460,461,462,465,466,467,471,472,473,481,482,485,486,501,510,511,513,514,528,558,559,560,561,562,587,588,593,594,],[52,52,-60,-62,-63,-64,-65,-66,-67,52,52,-70,-71,-52,52,52,52,-128,-102,52,-29,-107,52,-125,-126,-127,52,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,52,-91,52,52,-134,52,-98,-99,-100,-101,-104,-134,-90,-72,52,52,-53,52,-103,-129,52,-185,-169,-170,-343,-149,-150,52,52,52,-76,52,-92,52,52,-30,52,52,52,52,-186,52,52,52,-152,-159,-344,52,-162,-163,-145,52,-147,52,52,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,52,-77,52,52,-73,-69,52,52,52,52,-35,-36,52,52,52,-171,52,-154,52,-156,-151,-160,-143,-144,-148,-146,-130,52,-177,-178,-223,-222,-227,-229,-242,-87,-84,52,-238,-239,-241,-31,-34,52,52,52,52,52,-172,-173,-153,-155,-161,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,52,52,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'VOLATILE':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,27,28,29,30,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,71,72,73,74,75,76,77,78,81,90,91,95,96,97,101,104,106,115,116,118,119,121,122,123,124,125,126,127,128,129,136,137,140,141,183,184,192,197,203,204,205,206,207,208,209,210,211,212,213,214,216,221,222,223,224,225,226,227,228,229,230,231,232,238,251,258,259,262,267,294,298,299,304,311,312,313,322,323,330,333,334,335,336,33
7,338,340,341,342,348,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,448,449,457,458,460,461,462,465,466,467,471,472,473,481,482,485,486,501,510,511,513,514,528,558,559,560,561,562,587,588,593,594,],[53,53,-60,-62,-63,-64,-65,-66,-67,53,53,-70,-71,-52,53,53,53,-128,-102,53,-29,-107,53,-125,-126,-127,53,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,53,-91,53,53,-134,53,-98,-99,-100,-101,-104,-134,-90,-72,53,53,-53,53,-103,-129,53,-185,-169,-170,-343,-149,-150,53,53,53,-76,53,-92,53,53,-30,53,53,53,53,-186,53,53,53,-152,-159,-344,53,-162,-163,-145,53,-147,53,53,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,53,-77,53,53,-73,-69,53,53,53,53,-35,-36,53,53,53,-171,53,-154,53,-156,-151,-160,-143,-144,-148,-146,-130,53,-177,-178,-223,-222,-227,-229,-242,-87,-84,53,-238,-239,-241,-31,-34,53,53,53,53,53,-172,-173,-153,-155,-161,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,53,53,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'AUTO':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,27,28,29,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,71,72,73,74,75,76,77,78,81,90,91,96,97,101,104,106,118,119,121,122,123,127,128,129,137,140,192,206,208,221,222,223,224,225,226,227,228,229,230,231,232,251,262,267,311,312,313,322,330,334,336,337,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,461,462,465,466,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[54,54,-60,-62,-63,-64,-65,-66,-67,54,54,-70,-71,-52,54,54,54,-128,-102,54,-29,-107,-125,-126,-127,54,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,54,-91,54,54,-134,54,-98,-99,-100,-101,-104,-134,-90,-72,54,-53,54,-103,-129,-169,-170,-343,-149,-150,-76,54,-92,54,-30,54,-152,-344,54,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,-77,-73,-69,-35,-36,54,54,-171,-154,-156,-151,-130,54,-177,-178,-223,-222,-227,-229,-242,-87,-84,54,-238,-239,-241,-31,-34,54,54,-172,-173,-153,-155,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'REGISTER':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,27,28,29,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,71,72,73,74,75,76,77,78,81,90,91,96,97,101,104,106,118,119,121,122,123,127,128,129,137,140,192,206,208,221,222,223,224,225,226,227,228,229,230,231,232,251,262,267,311,312,313,322,330,334,336,337,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,461,462,465,466,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[55,55,-60,-62,-63,-64,-65,-66,-67,55,55,-70,-71,-52,55,55,55,-128,-102,55,-29,-107,-125,-126,-127,55,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,55,-91,55,55,-134,55,-98,-99,-100,-101,-104,-134,-90,-72,55,-53,55,-103,-129,-169,-170,-343,-149,-150,-76,55,-92,55,-30,55,-152,-344,55,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,-77,-73,-69,-35,-36,55,55,-171,-154,-156,-151,-130,55,-177,-178,-223,-222,-227,-229,-242,-87,-84,55,-238,-239,-241,-31,-34,55,55,-172,-173,-153,-155,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'STATIC':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,27,28,29,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,
59,60,64,67,68,69,71,72,73,74,75,76,77,78,81,90,91,95,96,97,101,104,106,116,118,119,121,122,123,127,128,129,136,137,140,184,192,197,206,208,221,222,223,224,225,226,227,228,229,230,231,232,251,259,262,267,311,312,313,322,323,330,334,336,337,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,448,449,458,460,461,462,465,466,471,472,473,481,482,485,486,501,510,511,514,528,558,559,560,561,562,587,588,593,594,],[29,29,-60,-62,-63,-64,-65,-66,-67,29,29,-70,-71,-52,29,29,29,-128,-102,29,-29,-107,-125,-126,-127,29,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,29,-91,29,29,-134,29,-98,-99,-100,-101,-104,-134,-90,-72,183,29,-53,29,-103,-129,-185,-169,-170,-343,-149,-150,-76,29,-92,258,29,-30,310,29,-186,-152,-344,29,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,-77,402,-73,-69,-35,-36,29,29,457,-171,-154,-156,-151,-130,29,-177,-178,-223,-222,-227,-229,-242,-87,-84,29,-238,-239,-241,-31,-34,513,29,522,29,-172,-173,-153,-155,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,549,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'EXTERN':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,27,28,29,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,71,72,73,74,75,76,77,78,81,90,91,96,97,101,104,106,118,119,121,122,123,127,128,129,137,140,192,206,208,221,222,223,224,225,226,227,228,229,230,231,232,251,262,267,311,312,313,322,330,334,336,337,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,461,462,465,466,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[56,56,-60,-62,-63,-64,-65,-66,-67,56,56,-70,-71,-52,56,56,56,-128,-102,56,-29,-107,-125,-126,-127,56,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,56,-91,56,56,-134,56,-98,-99,-100,-101,-104,-134,-90,-72,56,-53,56,-103,-129,-169,-170,-343,-149,-150,-76,56,-92,56,-30,56,-152,-344,56,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,-77,-73,-69,-35,-36,56,56,-171,-154,-156,-151,-130,56,-177,-178,-223,-222,-227,-229,-242,-87,-84,56,-238,-239,-241,-31,-34,56,56,-172,-173,-153,-155,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'TYPEDEF':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,27,28,29,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,71,72,73,74,75,76,77,78,81,90,91,96,97,101,104,106,118,119,121,122,123,127,128,129,137,140,192,206,208,221,222,223,224,225,226,227,228,229,230,231,232,251,262,267,311,312,313,322,330,334,336,337,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,461,462,465,466,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[57,57,-60,-62,-63,-64,-65,-66,-67,57,57,-70,-71,-52,57,57,57,-128,-102,57,-29,-107,-125,-126,-127,57,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,57,-91,57,57,-134,57,-98,-99,-100,-101,-104,-134,-90,-72,57,-53,57,-103,-129,-169,-170,-343,-149,-150,-76,57,-92,57,-30,57,-152,-344,57,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,-77,-73,-69,-35,-36,57,57,-171,-154,-156,-151,-130,57,-177,-178,-223,-222,-227,-229,-242,-87,-84,57,-238,-239,-241,-31,-34,57,57,-172,-173,-153,-155,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'_THREAD_LOCAL':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,2
2,23,24,25,27,28,29,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,71,72,73,74,75,76,77,78,81,90,91,96,97,101,104,106,118,119,121,122,123,127,128,129,137,140,192,206,208,221,222,223,224,225,226,227,228,229,230,231,232,251,262,267,311,312,313,322,330,334,336,337,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,461,462,465,466,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[58,58,-60,-62,-63,-64,-65,-66,-67,58,58,-70,-71,-52,58,58,58,-128,-102,58,-29,-107,-125,-126,-127,58,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,58,-91,58,58,-134,58,-98,-99,-100,-101,-104,-134,-90,-72,58,-53,58,-103,-129,-169,-170,-343,-149,-150,-76,58,-92,58,-30,58,-152,-344,58,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,-77,-73,-69,-35,-36,58,58,-171,-154,-156,-151,-130,58,-177,-178,-223,-222,-227,-229,-242,-87,-84,58,-238,-239,-241,-31,-34,58,58,-172,-173,-153,-155,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'INLINE':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,27,28,29,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,71,72,73,74,75,76,77,78,81,90,91,96,97,101,104,106,118,119,121,122,123,127,128,129,137,140,192,206,208,221,222,223,224,225,226,227,228,229,230,231,232,251,262,267,311,312,313,322,330,334,336,337,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,461,462,465,466,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[59,59,-60,-62,-63,-64,-65,-66,-67,59,59,-70,-71,-52,59,59,59,-128,-102,59,-29,-107,-125,-126,-127,59,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,59,-91,59,59,-134,59,-98,-99,-100,-101,-104,-134,-90,-72,59,-53,59,-103,-129,-169,-170,-343,-149,-150,-76,59,-92,59,-30,59,-152,-344,59,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,-77,-73,-69,-35,-36,59,59,-171,-154,-156,-151,-130,59,-177,-178,-223,-222,-227,-229,-242,-87,-84,59,-238,-239,-241,-31,-34,59,59,-172,-173,-153,-155,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'_NORETURN':([0,2,4,5,6,7,8,9,10,11,12,14,15,19,21,22,23,24,25,27,28,29,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,71,72,73,74,75,76,77,78,81,90,91,96,97,101,104,106,118,119,121,122,123,127,128,129,137,140,192,206,208,221,222,223,224,225,226,227,228,229,230,231,232,251,262,267,311,312,313,322,330,334,336,337,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,461,462,465,466,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[60,60,-60,-62,-63,-64,-65,-66,-67,60,60,-70,-71,-52,60,60,60,-128,-102,60,-29,-107,-125,-126,-127,60,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,60,-91,60,60,-134,60,-98,-99,-100,-101,-104,-134,-90,-72,60,-53,60,-103,-129,-169,-170,-343,-149,-150,-76,60,-92,60,-30,60,-152,-344,60,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,-77,-73,-69,-35,-36,60,60,-171,-154,-156,-151,-130,60,-177,-178,-223,-222,-227,-229,-242,-87,-84,60,-238,-239,-241,-31,-34,60,60,-172,-173,-153,-155,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'_ALIGNAS':([0,2,4,5,6,7,8,9,10,11,12
,14,15,19,21,22,23,24,25,27,28,29,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,64,67,68,69,71,72,73,74,75,76,77,78,81,90,91,96,97,101,104,106,118,119,121,122,123,124,125,126,127,128,129,137,140,141,192,203,204,205,206,207,208,209,210,211,212,214,216,221,222,223,224,225,226,227,228,229,230,231,232,238,251,262,267,294,298,299,304,311,312,313,322,330,333,334,335,336,337,338,340,341,342,348,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,461,462,465,466,467,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[61,61,-60,-62,-63,-64,-65,-66,-67,61,61,-70,-71,-52,61,61,61,-128,-102,61,-29,-107,-125,-126,-127,61,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,61,-91,61,61,-134,61,-98,-99,-100,-101,-104,-134,-90,-72,61,-53,61,-103,-129,-169,-170,-343,-149,-150,61,61,61,-76,61,-92,61,-30,61,61,61,61,61,-152,-159,-344,61,-162,-163,-145,-147,61,61,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,61,-77,-73,-69,61,61,61,61,-35,-36,61,61,-171,61,-154,61,-156,-151,-160,-143,-144,-148,-146,-130,61,-177,-178,-223,-222,-227,-229,-242,-87,-84,61,-238,-239,-241,-31,-34,61,61,-172,-173,-153,-155,-161,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'STRUCT':([0,2,4,5,6,7,8,9,10,11,14,15,19,21,22,23,26,27,28,29,34,50,51,52,53,54,55,56,57,58,59,60,64,67,68,70,71,72,73,90,91,96,97,98,99,100,101,102,103,112,116,117,121,124,125,126,127,128,129,137,140,141,193,197,203,204,205,207,208,210,211,213,221,222,223,224,225,226,227,228,229,230,231,232,238,251,262,267,294,298,299,304,311,312,313,322,333,335,338,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,467,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[62,62,-60,-62,-63,-64,-65,-66,-67,62,-70,-71,-52,-345,-345,-345,62,-345,-29,-107,-345,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,62,-91,62,-345,-134,62,-90,-72,62,-53,-93,-9,-10,-345,-94,-95,-97,-185,-96,-343,62,62,62,-76,62,-92,62,-30,62,62,-186,62,62,62,-159,-344,-162,-163,62,62,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,62,-77,-73,-69,62,62,62,62,-35,-36,62,62,62,62,-160,-130,62,-177,-178,-223,-222,-227,-229,-242,-87,-84,62,-238,-239,-241,-31,-34,62,62,-161,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235,]),'UNION':([0,2,4,5,6,7,8,9,10,11,14,15,19,21,22,23,26,27,28,29,34,50,51,52,53,54,55,56,57,58,59,60,64,67,68,70,71,72,73,90,91,96,97,98,99,100,101,102,103,112,116,117,121,124,125,126,127,128,129,137,140,141,193,197,203,204,205,207,208,210,211,213,221,222,223,224,225,226,227,228,229,230,231,232,238,251,262,267,294,298,299,304,311,312,313,322,333,335,338,349,351,353,354,355,356,358,360,361,370,371,372,374,375,377,439,440,449,460,467,471,472,473,481,482,485,486,501,510,511,528,558,559,560,561,562,587,588,593,594,],[63,63,-60,-62,-63,-64,-65,-66,-67,63,-70,-71,-52,-345,-345,-345,63,-345,-29,-107,-345,-134,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-61,63,-91,63,-345,-134,63,-90,-72,63,-53,-93,-9,-10,-345,-94,-95,-97,-185,-96,-343,63,63,63,-76,63,-92,63,-30,63,63,-186,63,63,63,-159,-344,-162,-163,63,63,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,63,-77,-73,-69,63,63,63,63,-35,-36,63,63,63,63,-160,-130,63,-177,-178,-223,-222,-227,-229,-242,-87,-84,63,-238,-239,-241,-31,-34,63,63,-161,-224,-228,-226,-86,-84,-237,-240,-68,-32,-33,-225,-230,-87,-84,-232,-233,-231,-234,-236,-235
,]),'LBRACE':([11,15,19,28,36,37,62,63,65,66,67,68,73,90,91,97,118,119,121,122,123,128,129,131,135,140,195,208,221,222,223,224,225,226,227,228,229,230,231,232,238,242,256,262,267,311,312,355,356,358,360,361,369,370,371,374,375,377,392,393,394,405,439,440,471,472,473,476,481,482,485,486,489,491,500,501,506,507,510,511,528,529,530,531,536,537,558,559,560,561,562,568,578,587,588,590,592,593,594,],[-345,-71,-52,-29,121,121,-157,-158,121,-7,-8,-91,-345,-90,-72,-53,121,121,-343,121,121,121,-92,121,121,-30,121,-344,121,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,121,121,-345,-73,-69,-35,-36,-223,-222,121,121,-242,121,-87,-74,-238,-239,-241,-11,121,-12,121,-31,-34,-224,121,-226,121,-86,-75,-237,-240,-345,-201,-345,-68,121,121,-32,-33,-225,121,121,121,121,-11,-230,-87,-74,-232,-233,-345,121,-231,-234,121,121,-236,-235,]),'RBRACE':([15,90,91,121,124,128,139,143,144,145,146,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,200,201,202,203,204,205,207,208,210,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,249,250,255,256,262,263,267,291,292,293,295,296,297,300,301,302,303,328,329,331,333,335,338,355,356,358,360,361,370,371,374,375,377,390,391,392,406,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,429,431,432,433,434,463,464,467,471,472,473,475,481,482,485,486,487,488,489,490,499,501,503,504,507,508,528,535,541,542,558,559,560,561,562,566,567,568,569,582,587,588,593,594,],[-71,-90,-72,-343,208,-345,-333,-311,-260,-261,-263,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,208,-174,-179,208,208,208,-159,-344,-162,-163,208,-5,-6,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,-247,-282,-196,-345,-73,-334,-69,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,208,208,-175,208,208,-160,-223,-222,-227,-229,-242,-87,-84,-238,-239,-241,208,-22,-21,-313,-264,-265,-266,-267,-268,-269,-270,-271,-272,-273,-274,-275,-276,-277,-278,-279,-280,-281,-300,-301,-302,-303,-304,-176,-180,-161,-224,-228,-226,-245,-86,-84,-237,-240,-248,-197,208,-199,-283,-68,-298,-299,-289,-290,-225,-198,208,-262,-230,-87,-84,-232,-233,-200,-307,208,-314,-308,-231,-234,-236,-235,]),'CASE':([15,90,91,121,128,208,221,222,223,224,225,226,227,228,229,230,231,232,242,262,267,355,356,358,360,361,369,370,371,374,375,377,471,472,473,481,482,485,486,501,528,529,530,531,558,559,560,561,562,578,587,588,590,592,593,594,],[-71,-90,-72,-343,234,-344,234,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,234,-73,-69,-223,-222,234,234,-242,234,-87,-74,-238,-239,-241,-224,234,-226,-86,-75,-237,-240,-68,-225,234,234,234,-230,-87,-74,-232,-233,234,-231,-234,234,234,-236,-235,]),'DEFAULT':([15,90,91,121,128,208,221,222,223,224,225,226,227,228,229,230,231,232,242,262,267,355,356,358,360,361,369,370,371,374,375,377,471,472,473,481,482,485,486,501,528,529,530,531,558,559,560,561,562,578,587,588,590,592,593,594,],[-71,-90,-72,-343,235,-344,235,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,235,-73,-69,-223,-222,235,235,-242,235,-87,-74,-238,-239,-241,-224,235,-226,-86,-75,-237,-240,-68,-225,235,235,235,-230,-87,-74,-232,-233,235,-231,-234,235,235,-236,-235,]),'IF':([15,90,91,121,128,208,221,222,223,224,225,226,227,228,229,230,231,232,242,262,267,355,356,358,360,361,369,370,371,374,375,377,471,472,473,481,482,485,486,501,528,529,530,531,558,559,560,561,562,578,587,588,590,592,593,594,],[-71,-90,-72,-343,237,-344,237,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,237,-73,-69,-223,-222,237,237,-242,237,-87,-74,-238,-239,-241,-2
24,237,-226,-86,-75,-237,-240,-68,-225,237,237,237,-230,-87,-74,-232,-233,237,-231,-234,237,237,-236,-235,]),'SWITCH':([15,90,91,121,128,208,221,222,223,224,225,226,227,228,229,230,231,232,242,262,267,355,356,358,360,361,369,370,371,374,375,377,471,472,473,481,482,485,486,501,528,529,530,531,558,559,560,561,562,578,587,588,590,592,593,594,],[-71,-90,-72,-343,240,-344,240,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,240,-73,-69,-223,-222,240,240,-242,240,-87,-74,-238,-239,-241,-224,240,-226,-86,-75,-237,-240,-68,-225,240,240,240,-230,-87,-74,-232,-233,240,-231,-234,240,240,-236,-235,]),'WHILE':([15,90,91,121,128,208,221,222,223,224,225,226,227,228,229,230,231,232,242,262,267,355,356,358,360,361,368,369,370,371,374,375,377,471,472,473,481,482,485,486,501,528,529,530,531,558,559,560,561,562,578,587,588,590,592,593,594,],[-71,-90,-72,-343,241,-344,241,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,241,-73,-69,-223,-222,241,241,-242,480,241,-87,-74,-238,-239,-241,-224,241,-226,-86,-75,-237,-240,-68,-225,241,241,241,-230,-87,-74,-232,-233,241,-231,-234,241,241,-236,-235,]),'DO':([15,90,91,121,128,208,221,222,223,224,225,226,227,228,229,230,231,232,242,262,267,355,356,358,360,361,369,370,371,374,375,377,471,472,473,481,482,485,486,501,528,529,530,531,558,559,560,561,562,578,587,588,590,592,593,594,],[-71,-90,-72,-343,242,-344,242,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,242,-73,-69,-223,-222,242,242,-242,242,-87,-74,-238,-239,-241,-224,242,-226,-86,-75,-237,-240,-68,-225,242,242,242,-230,-87,-74,-232,-233,242,-231,-234,242,242,-236,-235,]),'FOR':([15,90,91,121,128,208,221,222,223,224,225,226,227,228,229,230,231,232,242,262,267,355,356,358,360,361,369,370,371,374,375,377,471,472,473,481,482,485,486,501,528,529,530,531,558,559,560,561,562,578,587,588,590,592,593,594,],[-71,-90,-72,-343,243,-344,243,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,243,-73,-69,-223,-222,243,243,-242,243,-87,-74,-238,-239,-241,-224,243,-226,-86,-75,-237,-240,-68,-225,243,243,243,-230,-87,-74,-232,-233,243,-231,-234,243,243,-236,-235,]),'GOTO':([15,90,91,121,128,208,221,222,223,224,225,226,227,228,229,230,231,232,242,262,267,355,356,358,360,361,369,370,371,374,375,377,471,472,473,481,482,485,486,501,528,529,530,531,558,559,560,561,562,578,587,588,590,592,593,594,],[-71,-90,-72,-343,244,-344,244,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,244,-73,-69,-223,-222,244,244,-242,244,-87,-74,-238,-239,-241,-224,244,-226,-86,-75,-237,-240,-68,-225,244,244,244,-230,-87,-74,-232,-233,244,-231,-234,244,244,-236,-235,]),'BREAK':([15,90,91,121,128,208,221,222,223,224,225,226,227,228,229,230,231,232,242,262,267,355,356,358,360,361,369,370,371,374,375,377,471,472,473,481,482,485,486,501,528,529,530,531,558,559,560,561,562,578,587,588,590,592,593,594,],[-71,-90,-72,-343,245,-344,245,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,245,-73,-69,-223,-222,245,245,-242,245,-87,-74,-238,-239,-241,-224,245,-226,-86,-75,-237,-240,-68,-225,245,245,245,-230,-87,-74,-232,-233,245,-231,-234,245,245,-236,-235,]),'CONTINUE':([15,90,91,121,128,208,221,222,223,224,225,226,227,228,229,230,231,232,242,262,267,355,356,358,360,361,369,370,371,374,375,377,471,472,473,481,482,485,486,501,528,529,530,531,558,559,560,561,562,578,587,588,590,592,593,594,],[-71,-90,-72,-343,246,-344,246,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,246,-73,-69,-223,-222,246,246,-242,246,-87,-74,-238,-239,-241,-224,246,-226,-86,-75,-237,-240,-68,-225,246,246,246,-230,-87,-74,-232,-233,246,-231,-234,246,246,-236,-235,]),'RETURN':([15,90,91,121,128,208,221,222,223,224,225,
226,227,228,229,230,231,232,242,262,267,355,356,358,360,361,369,370,371,374,375,377,471,472,473,481,482,485,486,501,528,529,530,531,558,559,560,561,562,578,587,588,590,592,593,594,],[-71,-90,-72,-343,247,-344,247,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,247,-73,-69,-223,-222,247,247,-242,247,-87,-74,-238,-239,-241,-224,247,-226,-86,-75,-237,-240,-68,-225,247,247,247,-230,-87,-74,-232,-233,247,-231,-234,247,247,-236,-235,]),'PLUSPLUS':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,139,141,143,147,148,149,150,152,153,154,155,156,158,159,160,161,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,233,234,238,242,247,256,257,258,259,262,263,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,291,292,294,298,300,301,302,303,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,406,429,431,432,433,434,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,503,504,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,567,568,569,571,578,580,582,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,153,-345,-27,-28,-185,-343,153,153,153,-345,-333,153,-311,-292,-293,-294,-291,291,153,153,153,153,-297,-320,-295,-296,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,153,-345,-28,153,-186,-344,153,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,-320,153,153,153,153,-345,153,-345,-28,-73,-334,-69,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,-305,-306,153,153,-339,-340,-341,-342,-292,153,153,-345,153,153,-223,-222,153,153,-242,153,153,153,153,153,-87,-74,153,-238,-239,-241,153,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,153,-12,153,-292,153,153,153,-313,-300,-301,-302,-303,-304,-345,153,-345,-28,153,153,-224,153,-226,153,-86,-75,153,-237,-240,-345,-201,-345,-68,153,-298,-299,153,153,-345,-28,153,153,-292,-225,153,153,153,153,153,153,-11,-292,153,153,-230,-87,-74,-232,-233,153,-307,-345,-314,153,153,153,-308,-231,-234,153,153,-236,-235,]),'MINUSMINUS':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,139,141,143,147,148,149,150,152,153,154,155,156,158,159,160,161,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,233,234,238,242,247,256,257,258,259,262,263,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,291,292,294,298,300,301,302,303,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,406,429,431,432,433,434,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,503,504,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,567,568,569,571,578,580,582,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,154,-345,-27,-28,-185,-343,154,154,154,-345,-333,154,-311,-292,-293,-294,-291,292,154,154,154,154,-297,-320,-295,-296,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,154,-345,-28,154,-186,-344,154,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,-320,154,154,154,154,-345,154,-345,-28,-73,-334,-69,154,154
,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,-305,-306,154,154,-339,-340,-341,-342,-292,154,154,-345,154,154,-223,-222,154,154,-242,154,154,154,154,154,-87,-74,154,-238,-239,-241,154,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,154,-12,154,-292,154,154,154,-313,-300,-301,-302,-303,-304,-345,154,-345,-28,154,154,-224,154,-226,154,-86,-75,154,-237,-240,-345,-201,-345,-68,154,-298,-299,154,154,-345,-28,154,154,-292,-225,154,154,154,154,154,154,-11,-292,154,154,-230,-87,-74,-232,-233,154,-307,-345,-314,154,154,154,-308,-231,-234,154,154,-236,-235,]),'SIZEOF':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,141,147,148,149,150,153,154,155,156,160,161,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,247,256,257,258,259,262,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,571,578,580,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,156,-345,-27,-28,-185,-343,156,156,156,-345,156,-292,-293,-294,-291,156,156,156,156,-295,-296,156,-345,-28,156,-186,-344,156,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,156,156,156,156,-345,156,-345,-28,-73,-69,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,-292,156,156,-345,156,156,-223,-222,156,156,-242,156,156,156,156,156,-87,-74,156,-238,-239,-241,156,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,156,-12,156,-292,156,156,156,-345,156,-345,-28,156,156,-224,156,-226,156,-86,-75,156,-237,-240,-345,-201,-345,-68,156,156,156,-345,-28,156,156,-292,-225,156,156,156,156,156,156,-11,-292,156,156,-230,-87,-74,-232,-233,156,-345,156,156,156,-231,-234,156,156,-236,-235,]),'_ALIGNOF':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,141,147,148,149,150,153,154,155,156,160,161,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,247,256,257,258,259,262,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,571,578,580,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,157,-345,-27,-28,-185,-343,157,157,157,-345,157,-292,-293,-294,-291,157,157,157,157,-295,-296,157,-345,-28,157,-186,-344,157,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,157,157,157,157,-345,157,-345,-28,-73,-69,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,-292,157,157,-345,157,157,-223,-222,157,157,-242,157,157,157,157,157,-87,-74,157,-238,-239,-241,157,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,157,-12,157,-292,157,157,157,-345,157,-345,-28,157,157,-224,157,-226,157,-86,-75,157,-237,-240,-345,-201,-345,-68,157,157,157,-345,-28,157,157,-292,-225,157,157,157,157,157,157,-11,-292,157,157,-230,-87,-74,-232,-233,157,-345,157,157,157,-231,-234,157,157,-236,-235,]),'AND':([15,51
,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,139,141,143,145,146,147,148,149,150,151,152,153,154,155,156,158,159,160,161,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,233,234,238,242,247,250,256,257,258,259,262,263,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,291,292,293,294,295,296,297,298,300,301,302,303,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,406,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,429,431,432,433,434,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,499,500,501,502,503,504,505,507,508,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,567,568,569,571,578,580,582,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,150,-345,-27,-28,-185,-343,150,150,150,-345,-333,150,-311,282,-263,-292,-293,-294,-291,-282,-284,150,150,150,150,-297,-320,-295,-296,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,150,-345,-28,150,-186,-344,150,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,-320,150,150,150,150,-282,-345,150,-345,-28,-73,-334,-69,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,-305,-306,-285,150,-286,-287,-288,150,-339,-340,-341,-342,-292,150,150,-345,150,150,-223,-222,150,150,-242,150,150,150,150,150,-87,-74,150,-238,-239,-241,150,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,150,-12,150,-292,150,150,150,-313,-264,-265,-266,-267,-268,-269,-270,-271,-272,-273,-274,-275,-276,-277,282,282,282,282,-300,-301,-302,-303,-304,-345,150,-345,-28,150,150,-224,150,-226,150,-86,-75,150,-237,-240,-345,-201,-283,-345,-68,150,-298,-299,150,-289,-290,150,-345,-28,150,150,-292,-225,150,150,150,150,150,150,-11,-292,150,150,-230,-87,-74,-232,-233,150,-307,-345,-314,150,150,150,-308,-231,-234,150,150,-236,-235,]),'PLUS':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,139,141,143,145,146,147,148,149,150,151,152,153,154,155,156,158,159,160,161,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,233,234,238,242,247,250,256,257,258,259,262,263,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,291,292,293,294,295,296,297,298,300,301,302,303,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,406,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,429,431,432,433,434,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,499,500,501,502,503,504,505,507,508,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,567,568,569,571,578,580,582,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,148,-345,-27,-28,-185,-343,148,148,148,-345,-333,148,-311,272,-263,-292,-293,-294,-291,-282,-284,148,148,148,148,-297,-320,-295,-296,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,148,-345,-28,148,-186,-344,148,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,-320,148,148,148,148,-282,-345,148,-345,-28,-73,-334,-69,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148
,148,148,148,-305,-306,-285,148,-286,-287,-288,148,-339,-340,-341,-342,-292,148,148,-345,148,148,-223,-222,148,148,-242,148,148,148,148,148,-87,-74,148,-238,-239,-241,148,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,148,-12,148,-292,148,148,148,-313,-264,-265,-266,-267,-268,272,272,272,272,272,272,272,272,272,272,272,272,272,-300,-301,-302,-303,-304,-345,148,-345,-28,148,148,-224,148,-226,148,-86,-75,148,-237,-240,-345,-201,-283,-345,-68,148,-298,-299,148,-289,-290,148,-345,-28,148,148,-292,-225,148,148,148,148,148,148,-11,-292,148,148,-230,-87,-74,-232,-233,148,-307,-345,-314,148,148,148,-308,-231,-234,148,148,-236,-235,]),'MINUS':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,139,141,143,145,146,147,148,149,150,151,152,153,154,155,156,158,159,160,161,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,233,234,238,242,247,250,256,257,258,259,262,263,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,291,292,293,294,295,296,297,298,300,301,302,303,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,406,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,429,431,432,433,434,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,499,500,501,502,503,504,505,507,508,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,567,568,569,571,578,580,582,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,149,-345,-27,-28,-185,-343,149,149,149,-345,-333,149,-311,273,-263,-292,-293,-294,-291,-282,-284,149,149,149,149,-297,-320,-295,-296,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,149,-345,-28,149,-186,-344,149,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,-320,149,149,149,149,-282,-345,149,-345,-28,-73,-334,-69,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,-305,-306,-285,149,-286,-287,-288,149,-339,-340,-341,-342,-292,149,149,-345,149,149,-223,-222,149,149,-242,149,149,149,149,149,-87,-74,149,-238,-239,-241,149,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,149,-12,149,-292,149,149,149,-313,-264,-265,-266,-267,-268,273,273,273,273,273,273,273,273,273,273,273,273,273,-300,-301,-302,-303,-304,-345,149,-345,-28,149,149,-224,149,-226,149,-86,-75,149,-237,-240,-345,-201,-283,-345,-68,149,-298,-299,149,-289,-290,149,-345,-28,149,149,-292,-225,149,149,149,149,149,149,-11,-292,149,149,-230,-87,-74,-232,-233,149,-307,-345,-314,149,149,149,-308,-231,-234,149,149,-236,-235,]),'NOT':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,141,147,148,149,150,153,154,155,156,160,161,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,247,256,257,258,259,262,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,571,578,580,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,160,-345,-27,-28,-185,-343,160,160,160,-345,160,-292,-293,-294,-291,160
,160,160,160,-295,-296,160,-345,-28,160,-186,-344,160,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,160,160,160,160,-345,160,-345,-28,-73,-69,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,-292,160,160,-345,160,160,-223,-222,160,160,-242,160,160,160,160,160,-87,-74,160,-238,-239,-241,160,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,160,-12,160,-292,160,160,160,-345,160,-345,-28,160,160,-224,160,-226,160,-86,-75,160,-237,-240,-345,-201,-345,-68,160,160,160,-345,-28,160,160,-292,-225,160,160,160,160,160,160,-11,-292,160,160,-230,-87,-74,-232,-233,160,-345,160,160,160,-231,-234,160,160,-236,-235,]),'LNOT':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,141,147,148,149,150,153,154,155,156,160,161,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,247,256,257,258,259,262,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,571,578,580,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,161,-345,-27,-28,-185,-343,161,161,161,-345,161,-292,-293,-294,-291,161,161,161,161,-295,-296,161,-345,-28,161,-186,-344,161,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,161,161,161,161,-345,161,-345,-28,-73,-69,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,-292,161,161,-345,161,161,-223,-222,161,161,-242,161,161,161,161,161,-87,-74,161,-238,-239,-241,161,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,161,-12,161,-292,161,161,161,-345,161,-345,-28,161,161,-224,161,-226,161,-86,-75,161,-237,-240,-345,-201,-345,-68,161,161,161,-345,-28,161,161,-292,-225,161,161,161,161,161,161,-11,-292,161,161,-230,-87,-74,-232,-233,161,-345,161,161,161,-231,-234,161,161,-236,-235,]),'OFFSETOF':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,141,147,148,149,150,153,154,155,156,160,161,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,247,256,257,258,259,262,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,571,578,580,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,165,-345,-27,-28,-185,-343,165,165,165,-345,165,-292,-293,-294,-291,165,165,165,165,-295,-296,165,-345,-28,165,-186,-344,165,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,165,165,165,165,-345,165,-345,-28,-73,-69,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,-292,165,165,-345,165,165,-223,-222,165,165,-242,165,165,165,165,165,-87,-74,165,-238,-239,-241,165,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,165,-12,165,-292,165,165,165,-345,165,-345,-28,165,165,-224,165,-226,165,-86,-75,165,-237,-240,-345,-201,-345,-68,165,165,165,-345,-28,165,165,-292,-225,165,165,165,165,165,165,-11,-292,165,165,-230,-87,-74,-232,-233,16
5,-345,165,165,165,-231,-234,165,165,-236,-235,]),'INT_CONST_DEC':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,141,147,148,149,150,153,154,155,156,160,161,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,247,256,257,258,259,262,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,571,578,580,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,166,-345,-27,-28,-185,-343,166,166,166,-345,166,-292,-293,-294,-291,166,166,166,166,-295,-296,166,-345,-28,166,-186,-344,166,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,166,166,166,166,-345,166,-345,-28,-73,-69,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,-292,166,166,-345,166,166,-223,-222,166,166,-242,166,166,166,166,166,-87,-74,166,-238,-239,-241,166,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,166,-12,166,-292,166,166,166,-345,166,-345,-28,166,166,-224,166,-226,166,-86,-75,166,-237,-240,-345,-201,-345,-68,166,166,166,-345,-28,166,166,-292,-225,166,166,166,166,166,166,-11,-292,166,166,-230,-87,-74,-232,-233,166,-345,166,166,166,-231,-234,166,166,-236,-235,]),'INT_CONST_OCT':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,141,147,148,149,150,153,154,155,156,160,161,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,247,256,257,258,259,262,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,571,578,580,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,167,-345,-27,-28,-185,-343,167,167,167,-345,167,-292,-293,-294,-291,167,167,167,167,-295,-296,167,-345,-28,167,-186,-344,167,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,167,167,167,167,-345,167,-345,-28,-73,-69,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,-292,167,167,-345,167,167,-223,-222,167,167,-242,167,167,167,167,167,-87,-74,167,-238,-239,-241,167,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,167,-12,167,-292,167,167,167,-345,167,-345,-28,167,167,-224,167,-226,167,-86,-75,167,-237,-240,-345,-201,-345,-68,167,167,167,-345,-28,167,167,-292,-225,167,167,167,167,167,167,-11,-292,167,167,-230,-87,-74,-232,-233,167,-345,167,167,167,-231,-234,167,167,-236,-235,]),'INT_CONST_HEX':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,141,147,148,149,150,153,154,155,156,160,161,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,247,256,257,258,259,262,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,448,455,457,458,459,469,471,472,473,476,481,482,484,485,
486,489,491,500,501,502,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,571,578,580,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,168,-345,-27,-28,-185,-343,168,168,168,-345,168,-292,-293,-294,-291,168,168,168,168,-295,-296,168,-345,-28,168,-186,-344,168,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,168,168,168,168,-345,168,-345,-28,-73,-69,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,-292,168,168,-345,168,168,-223,-222,168,168,-242,168,168,168,168,168,-87,-74,168,-238,-239,-241,168,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,168,-12,168,-292,168,168,168,-345,168,-345,-28,168,168,-224,168,-226,168,-86,-75,168,-237,-240,-345,-201,-345,-68,168,168,168,-345,-28,168,168,-292,-225,168,168,168,168,168,168,-11,-292,168,168,-230,-87,-74,-232,-233,168,-345,168,168,168,-231,-234,168,168,-236,-235,]),'INT_CONST_BIN':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,141,147,148,149,150,153,154,155,156,160,161,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,247,256,257,258,259,262,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,571,578,580,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,169,-345,-27,-28,-185,-343,169,169,169,-345,169,-292,-293,-294,-291,169,169,169,169,-295,-296,169,-345,-28,169,-186,-344,169,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,169,169,169,169,-345,169,-345,-28,-73,-69,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,-292,169,169,-345,169,169,-223,-222,169,169,-242,169,169,169,169,169,-87,-74,169,-238,-239,-241,169,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,169,-12,169,-292,169,169,169,-345,169,-345,-28,169,169,-224,169,-226,169,-86,-75,169,-237,-240,-345,-201,-345,-68,169,169,169,-345,-28,169,169,-292,-225,169,169,169,169,169,169,-11,-292,169,169,-230,-87,-74,-232,-233,169,-345,169,169,169,-231,-234,169,169,-236,-235,]),'INT_CONST_CHAR':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,141,147,148,149,150,153,154,155,156,160,161,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,247,256,257,258,259,262,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,571,578,580,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,170,-345,-27,-28,-185,-343,170,170,170,-345,170,-292,-293,-294,-291,170,170,170,170,-295,-296,170,-345,-28,170,-186,-344,170,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,170,170,170,170,-345,170,-345,-28,-73,-69,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,-292,170,170,-345,170,170,-223,-222,170,170,-242,170,170,170,170,170,-87,-74,170,-238,-239,-241,1
70,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,170,-12,170,-292,170,170,170,-345,170,-345,-28,170,170,-224,170,-226,170,-86,-75,170,-237,-240,-345,-201,-345,-68,170,170,170,-345,-28,170,170,-292,-225,170,170,170,170,170,170,-11,-292,170,170,-230,-87,-74,-232,-233,170,-345,170,170,170,-231,-234,170,170,-236,-235,]),'FLOAT_CONST':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,141,147,148,149,150,153,154,155,156,160,161,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,247,256,257,258,259,262,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,571,578,580,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,171,-345,-27,-28,-185,-343,171,171,171,-345,171,-292,-293,-294,-291,171,171,171,171,-295,-296,171,-345,-28,171,-186,-344,171,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,171,171,171,171,-345,171,-345,-28,-73,-69,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,-292,171,171,-345,171,171,-223,-222,171,171,-242,171,171,171,171,171,-87,-74,171,-238,-239,-241,171,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,171,-12,171,-292,171,171,171,-345,171,-345,-28,171,171,-224,171,-226,171,-86,-75,171,-237,-240,-345,-201,-345,-68,171,171,171,-345,-28,171,171,-292,-225,171,171,171,171,171,171,-11,-292,171,171,-230,-87,-74,-232,-233,171,-345,171,171,171,-231,-234,171,171,-236,-235,]),'HEX_FLOAT_CONST':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,141,147,148,149,150,153,154,155,156,160,161,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,247,256,257,258,259,262,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,571,578,580,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,172,-345,-27,-28,-185,-343,172,172,172,-345,172,-292,-293,-294,-291,172,172,172,172,-295,-296,172,-345,-28,172,-186,-344,172,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,172,172,172,172,-345,172,-345,-28,-73,-69,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,-292,172,172,-345,172,172,-223,-222,172,172,-242,172,172,172,172,172,-87,-74,172,-238,-239,-241,172,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,172,-12,172,-292,172,172,172,-345,172,-345,-28,172,172,-224,172,-226,172,-86,-75,172,-237,-240,-345,-201,-345,-68,172,172,172,-345,-28,172,172,-292,-225,172,172,172,172,172,172,-11,-292,172,172,-230,-87,-74,-232,-233,172,-345,172,172,172,-231,-234,172,172,-236,-235,]),'CHAR_CONST':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,141,147,148,149,150,153,154,155,156,160,161,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,247,256,257,258,259,262,267,268,269,270,271,272,273,274,275,276,27
7,278,279,280,281,282,283,284,285,286,287,288,294,298,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,571,578,580,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,173,-345,-27,-28,-185,-343,173,173,173,-345,173,-292,-293,-294,-291,173,173,173,173,-295,-296,173,-345,-28,173,-186,-344,173,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,173,173,173,173,-345,173,-345,-28,-73,-69,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,-292,173,173,-345,173,173,-223,-222,173,173,-242,173,173,173,173,173,-87,-74,173,-238,-239,-241,173,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,173,-12,173,-292,173,173,173,-345,173,-345,-28,173,173,-224,173,-226,173,-86,-75,173,-237,-240,-345,-201,-345,-68,173,173,173,-345,-28,173,173,-292,-225,173,173,173,173,173,173,-11,-292,173,173,-230,-87,-74,-232,-233,173,-345,173,173,173,-231,-234,173,173,-236,-235,]),'WCHAR_CONST':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,141,147,148,149,150,153,154,155,156,160,161,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,247,256,257,258,259,262,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,571,578,580,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,174,-345,-27,-28,-185,-343,174,174,174,-345,174,-292,-293,-294,-291,174,174,174,174,-295,-296,174,-345,-28,174,-186,-344,174,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,174,174,174,174,-345,174,-345,-28,-73,-69,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,-292,174,174,-345,174,174,-223,-222,174,174,-242,174,174,174,174,174,-87,-74,174,-238,-239,-241,174,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,174,-12,174,-292,174,174,174,-345,174,-345,-28,174,174,-224,174,-226,174,-86,-75,174,-237,-240,-345,-201,-345,-68,174,174,174,-345,-28,174,174,-292,-225,174,174,174,174,174,174,-11,-292,174,174,-230,-87,-74,-232,-233,174,-345,174,174,174,-231,-234,174,174,-236,-235,]),'U8CHAR_CONST':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,141,147,148,149,150,153,154,155,156,160,161,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,247,256,257,258,259,262,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,571,578,580,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,175,-345,-27,-28,-185,-343,175,175,175,-345,175,-292,-293,-294,-291,175,175,175,175,-295,-296,175,-345,-28,175,-186,-344,175,-221
,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,175,175,175,175,-345,175,-345,-28,-73,-69,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,-292,175,175,-345,175,175,-223,-222,175,175,-242,175,175,175,175,175,-87,-74,175,-238,-239,-241,175,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,175,-12,175,-292,175,175,175,-345,175,-345,-28,175,175,-224,175,-226,175,-86,-75,175,-237,-240,-345,-201,-345,-68,175,175,175,-345,-28,175,175,-292,-225,175,175,175,175,175,175,-11,-292,175,175,-230,-87,-74,-232,-233,175,-345,175,175,175,-231,-234,175,175,-236,-235,]),'U16CHAR_CONST':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,141,147,148,149,150,153,154,155,156,160,161,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,247,256,257,258,259,262,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,571,578,580,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,176,-345,-27,-28,-185,-343,176,176,176,-345,176,-292,-293,-294,-291,176,176,176,176,-295,-296,176,-345,-28,176,-186,-344,176,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,176,176,176,176,-345,176,-345,-28,-73,-69,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,-292,176,176,-345,176,176,-223,-222,176,176,-242,176,176,176,176,176,-87,-74,176,-238,-239,-241,176,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,176,-12,176,-292,176,176,176,-345,176,-345,-28,176,176,-224,176,-226,176,-86,-75,176,-237,-240,-345,-201,-345,-68,176,176,176,-345,-28,176,176,-292,-225,176,176,176,176,176,176,-11,-292,176,176,-230,-87,-74,-232,-233,176,-345,176,176,176,-231,-234,176,176,-236,-235,]),'U32CHAR_CONST':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,141,147,148,149,150,153,154,155,156,160,161,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,247,256,257,258,259,262,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,571,578,580,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,177,-345,-27,-28,-185,-343,177,177,177,-345,177,-292,-293,-294,-291,177,177,177,177,-295,-296,177,-345,-28,177,-186,-344,177,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,177,177,177,177,-345,177,-345,-28,-73,-69,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,-292,177,177,-345,177,177,-223,-222,177,177,-242,177,177,177,177,177,-87,-74,177,-238,-239,-241,177,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,177,-12,177,-292,177,177,177,-345,177,-345,-28,177,177,-224,177,-226,177,-86,-75,177,-237,-240,-345,-201,-345,-68,177,177,177,-345,-28,177,177,-292,-225,177,177,177,177,177,177,-11,-292,177,177,-230,-87,-74,-232,-233,177,-345,177,177,177,-231,-234,177,177,-236,-2
35,]),'STRING_LITERAL':([15,51,52,53,81,90,91,92,94,95,114,115,116,121,126,128,135,136,138,139,141,143,147,148,149,150,153,154,155,156,160,161,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,247,256,257,258,259,262,263,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,407,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,571,578,580,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,139,139,-345,-27,-28,-185,-343,139,139,139,-345,263,-333,139,263,-292,-293,-294,-291,139,139,139,139,-295,-296,139,-345,-28,139,-186,-344,139,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,139,139,139,139,-345,139,-345,-28,-73,-334,139,-69,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,-292,139,139,-345,139,139,-223,-222,139,139,-242,139,139,139,139,139,-87,-74,139,-238,-239,-241,139,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,139,-12,139,-292,139,139,139,263,-345,139,-345,-28,139,139,-224,139,-226,139,-86,-75,139,-237,-240,-345,-201,-345,-68,139,139,139,-345,-28,139,139,-292,-225,139,139,139,139,139,139,-11,-292,139,139,-230,-87,-74,-232,-233,139,-345,139,139,139,-231,-234,139,139,-236,-235,]),'WSTRING_LITERAL':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,141,147,148,149,150,153,154,155,156,160,161,164,178,179,180,181,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,247,256,257,258,259,262,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,300,301,302,303,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,571,578,580,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,178,-345,-27,-28,-185,-343,178,178,178,-345,178,-292,-293,-294,-291,178,178,178,178,-295,-296,300,-335,-336,-337,-338,178,-345,-28,178,-186,-344,178,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,178,178,178,178,-345,178,-345,-28,-73,-69,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,-339,-340,-341,-342,-292,178,178,-345,178,178,-223,-222,178,178,-242,178,178,178,178,178,-87,-74,178,-238,-239,-241,178,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,178,-12,178,-292,178,178,178,-345,178,-345,-28,178,178,-224,178,-226,178,-86,-75,178,-237,-240,-345,-201,-345,-68,178,178,178,-345,-28,178,178,-292,-225,178,178,178,178,178,178,-11,-292,178,178,-230,-87,-74,-232,-233,178,-345,178,178,178,-231,-234,178,178,-236,-235,]),'U8STRING_LITERAL':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,141,147,148,149,150,153,154,155,156,160,161,164,178,179,180,181,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,247,256,257,258,259,262,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,300,301,302,303,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,3
78,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,571,578,580,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,179,-345,-27,-28,-185,-343,179,179,179,-345,179,-292,-293,-294,-291,179,179,179,179,-295,-296,301,-335,-336,-337,-338,179,-345,-28,179,-186,-344,179,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,179,179,179,179,-345,179,-345,-28,-73,-69,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,-339,-340,-341,-342,-292,179,179,-345,179,179,-223,-222,179,179,-242,179,179,179,179,179,-87,-74,179,-238,-239,-241,179,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,179,-12,179,-292,179,179,179,-345,179,-345,-28,179,179,-224,179,-226,179,-86,-75,179,-237,-240,-345,-201,-345,-68,179,179,179,-345,-28,179,179,-292,-225,179,179,179,179,179,179,-11,-292,179,179,-230,-87,-74,-232,-233,179,-345,179,179,179,-231,-234,179,179,-236,-235,]),'U16STRING_LITERAL':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,141,147,148,149,150,153,154,155,156,160,161,164,178,179,180,181,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,247,256,257,258,259,262,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,300,301,302,303,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,571,578,580,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,180,-345,-27,-28,-185,-343,180,180,180,-345,180,-292,-293,-294,-291,180,180,180,180,-295,-296,302,-335,-336,-337,-338,180,-345,-28,180,-186,-344,180,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,180,180,180,180,-345,180,-345,-28,-73,-69,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,-339,-340,-341,-342,-292,180,180,-345,180,180,-223,-222,180,180,-242,180,180,180,180,180,-87,-74,180,-238,-239,-241,180,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,180,-12,180,-292,180,180,180,-345,180,-345,-28,180,180,-224,180,-226,180,-86,-75,180,-237,-240,-345,-201,-345,-68,180,180,180,-345,-28,180,180,-292,-225,180,180,180,180,180,180,-11,-292,180,180,-230,-87,-74,-232,-233,180,-345,180,180,180,-231,-234,180,180,-236,-235,]),'U32STRING_LITERAL':([15,51,52,53,81,90,91,94,95,114,115,116,121,126,128,135,136,141,147,148,149,150,153,154,155,156,160,161,164,178,179,180,181,182,183,184,195,197,208,221,222,223,224,225,226,227,228,229,230,231,232,234,238,242,247,256,257,258,259,262,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,300,301,302,303,306,309,310,323,332,347,355,356,358,360,361,362,365,366,367,369,370,371,372,374,375,377,378,379,380,381,382,383,384,385,386,387,388,389,392,393,394,397,400,401,402,405,448,455,457,458,459,469,471,472,473,476,481,482,484,485,486,489,491,500,501,502,505,512,513,514,521,522,524,528,529,530,531,532,533,536,537,547,548,549,558,559,560,561,562,565,568,571,578,580,587,588,590,592,593,594,],[-71,-131,-132,-133,-134,-90,-72,181,-345,-27,-28,-185,-343,181,181,181,-345,181,-292,-293,-294,-291,181,181,181,181,-295,-296,303,-33
5,-336,-337,-338,181,-345,-28,181,-186,-344,181,-221,-219,-220,-78,-79,-80,-81,-82,-83,-84,-85,181,181,181,181,-345,181,-345,-28,-73,-69,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,-339,-340,-341,-342,-292,181,181,-345,181,181,-223,-222,181,181,-242,181,181,181,181,181,-87,-74,181,-238,-239,-241,181,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-11,181,-12,181,-292,181,181,181,-345,181,-345,-28,181,181,-224,181,-226,181,-86,-75,181,-237,-240,-345,-201,-345,-68,181,181,181,-345,-28,181,181,-292,-225,181,181,181,181,181,181,-11,-292,181,181,-230,-87,-74,-232,-233,181,-345,181,181,181,-231,-234,181,181,-236,-235,]),'ELSE':([15,91,208,225,226,227,228,229,230,232,262,267,355,358,360,361,370,371,374,375,377,471,472,473,481,482,485,486,501,528,558,559,560,561,562,587,588,593,594,],[-71,-72,-344,-78,-79,-80,-81,-82,-83,-85,-73,-69,-223,-227,-229,-242,-87,-84,-238,-239,-241,-224,-228,-226,-86,-84,-237,-240,-68,-225,-230,578,-84,-232,-233,-231,-234,-236,-235,]),'PPPRAGMASTR':([15,],[91,]),'EQUALS':([19,28,73,86,87,88,89,97,111,130,132,139,140,143,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,202,208,233,250,252,263,291,292,293,295,296,297,300,301,302,303,311,312,395,396,403,404,406,429,431,432,433,434,439,440,492,494,495,496,499,503,504,507,508,510,511,538,539,540,567,569,582,],[-52,-29,-181,135,-182,-54,-37,-53,195,-181,-55,-333,-30,-311,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,332,-344,-320,379,-38,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-35,-36,491,-202,-43,-44,-313,-300,-301,-302,-303,-304,-31,-34,-203,-205,-39,-42,-283,-298,-299,-289,-290,-32,-33,-204,-40,-41,-307,-314,-308,]),'COMMA':([19,24,25,28,29,30,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,51,52,53,54,55,56,57,58,59,60,73,74,75,76,77,78,81,84,85,86,87,88,89,97,104,106,108,110,111,113,114,115,116,118,119,122,123,130,132,139,140,142,143,144,145,146,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,187,189,190,191,192,196,197,200,201,202,206,208,212,214,216,233,239,248,249,250,252,253,254,255,263,265,291,292,293,295,296,297,300,301,302,303,311,312,315,316,317,318,319,320,321,324,325,326,327,328,329,330,331,334,336,337,340,341,342,344,345,346,348,349,350,352,353,354,376,391,403,404,406,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,438,439,440,444,445,446,447,461,462,463,464,465,466,470,474,475,477,478,479,487,488,490,495,496,499,503,504,507,508,510,511,517,518,520,526,527,535,539,540,541,542,543,550,551,552,555,556,557,563,566,567,569,572,573,576,577,582,584,585,586,],[-52,-128,-102,-29,-107,-345,-125,-126,-127,-129,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-181,-98,-99,-100,-101,-104,-134,134,-135,-137,-182,-54,-37,-53,-103,-129,194,-139,-141,-183,-27,-28,-185,-169,-170,-149,-150,-181,-55,-333,-30,266,-311,-260,-261,-263,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,313,314,-189,-194,-345,-184,-186,331,-174,-179,-152,-344,-145,-147,-345,-320,365,-243,-247,-282,-38,-136,-138,-196,-334,365,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-35,-36,-191,-192,-193,-207,-56,-1,-2,-45,-209,-140,-142,331,331,-171,-175,-154,-156,-151,-143,-144,-148,468,-164,-166,-146,-130,-206,-207,-177,-178,365,489,-43,-44,-313,
365,-264,-265,-266,-267,-268,-269,-270,-271,-272,-273,-274,-275,-276,-277,-278,-279,-280,-281,365,505,-300,-318,-301,-302,-303,-304,509,-31,-34,-190,-195,-57,-208,-172,-173,-176,-180,-153,-155,-168,365,-245,-244,365,365,-248,-197,-199,-39,-42,-283,-298,-299,-289,-290,-32,-33,-210,-216,-214,-165,-167,-198,-40,-41,568,-262,-319,-50,-51,-212,-211,-213,-215,365,-200,-307,-314,-46,-49,-217,-218,-308,365,-47,-48,]),'RPAREN':([19,24,25,28,29,30,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,51,52,53,54,55,56,57,58,59,60,74,75,76,77,78,81,88,89,93,96,97,104,106,113,114,115,116,118,119,122,123,132,133,137,138,139,140,142,143,144,145,146,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,185,186,187,188,189,190,191,192,196,197,206,208,212,214,215,216,217,218,239,248,249,250,252,260,261,263,264,265,288,291,292,293,295,296,297,300,301,302,303,311,312,315,316,317,318,319,320,321,322,324,325,330,334,336,337,340,341,342,348,349,350,351,352,353,354,355,357,363,364,403,404,406,407,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,428,429,430,431,432,433,434,435,436,437,439,440,443,444,445,446,447,449,450,451,452,453,454,460,461,462,465,466,474,475,477,478,479,487,495,496,499,503,504,507,508,510,511,515,516,517,518,520,525,539,540,542,543,544,545,550,551,552,555,556,557,563,565,567,569,572,573,576,577,580,581,582,583,585,586,589,591,],[-52,-128,-102,-29,-107,-345,-125,-126,-127,-129,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-98,-99,-100,-101,-104,-134,-54,-37,140,-345,-53,-103,-129,-183,-27,-28,-185,-169,-170,-149,-150,-55,252,-345,262,-333,-30,267,-311,-260,-261,-263,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,311,312,-187,-17,-18,-189,-194,-345,-184,-186,-152,-344,-145,-147,349,-345,353,354,-14,-243,-247,-282,-38,403,404,-334,405,406,429,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-35,-36,-191,-192,-193,-207,-56,-1,-2,-345,-45,-209,-171,-154,-156,-151,-143,-144,-148,-146,-130,-206,-345,-207,-177,-178,-223,-13,475,476,-43,-44,-313,501,-264,-265,-266,-267,-268,-269,-270,-271,-272,-273,-274,-275,-276,-277,-278,-279,-280,-281,504,-300,-318,-301,-302,-303,-304,506,507,508,-31,-34,-188,-190,-195,-57,-208,-345,517,518,-207,-23,-24,-345,-172,-173,-153,-155,529,-245,-244,530,531,-248,-39,-42,-283,-298,-299,-289,-290,-32,-33,550,551,-210,-216,-214,557,-40,-41,-262,-319,569,-315,-50,-51,-212,-211,-213,-215,579,-345,-307,-314,-46,-49,-217,-218,-345,590,-308,-316,-47,-48,592,-317,]),'COLON':([19,24,28,31,32,33,35,38,39,40,41,42,43,44,45,46,47,48,49,51,52,53,81,87,88,89,97,106,118,119,122,123,130,132,139,140,143,144,145,146,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,206,208,209,212,214,233,235,248,249,250,252,263,291,292,293,295,296,297,300,301,302,303,311,312,330,334,336,337,340,341,342,346,348,349,353,354,359,403,404,406,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,429,431,432,433,434,439,440,461,462,465,466,468,475,477,487,495,496,499,503,504,507,508,510,511,539,540,542,567,569,582,],[-52,-128,-29,-125,-126,-127,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-131,-132,-133,-134,-182,-54,-37,-53,-129,-169,-170,-149,-150,-181,-55,-333,-30,-311,-260,-261,-263,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-152,-344,347,-145,-147,358,360,-243,-2
47,-282,-38,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-35,-36,-171,-154,-156,-151,-143,-144,-148,469,-146,-130,-177,-178,472,-43,-44,-313,502,-264,-265,-266,-267,-268,-269,-270,-271,-272,-273,-274,-275,-276,-277,-278,-279,-280,-281,-300,-301,-302,-303,-304,-31,-34,-172,-173,-153,-155,347,-245,-244,-248,-39,-42,-283,-298,-299,-289,-290,-32,-33,-40,-41,-262,-307,-314,-308,]),'LBRACKET':([19,24,25,28,29,30,31,32,33,34,35,38,39,40,41,42,43,44,45,46,47,48,49,51,52,53,54,55,56,57,58,59,60,74,75,76,77,78,81,88,89,97,104,106,113,114,115,116,118,119,121,122,123,132,139,140,143,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,192,196,197,206,208,212,214,216,233,252,256,263,291,292,300,301,302,303,311,312,318,319,322,324,325,330,334,336,337,340,341,342,348,349,351,352,353,354,395,396,403,404,406,429,431,432,433,434,439,440,446,447,452,461,462,465,466,489,492,494,495,496,500,503,504,510,511,517,518,520,538,539,540,544,545,550,551,552,555,556,557,567,568,569,572,573,576,577,582,583,585,586,591,],[95,-128,-102,-29,-107,-345,-125,-126,-127,-129,-246,-113,-114,-115,-116,-117,-118,-119,-120,-121,-122,-123,-124,-131,-132,-133,-105,-106,-108,-109,-110,-111,-112,-98,-99,-100,-101,-104,-134,136,-37,95,-103,-129,-183,-27,-28,-185,-169,-170,-343,-149,-150,136,-333,-30,-311,287,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,323,-184,-186,-152,-344,-145,-147,323,-320,-38,397,-334,-305,-306,-339,-340,-341,-342,-35,-36,323,448,323,-45,459,-171,-154,-156,-151,-143,-144,-148,-146,-130,323,323,-177,-178,397,-202,-43,-44,-313,-300,-301,-302,-303,-304,-31,-34,448,459,323,-172,-173,-153,-155,397,-203,-205,-39,-42,397,-298,-299,-32,-33,-210,-216,-214,-204,-40,-41,571,-315,-50,-51,-212,-211,-213,-215,-307,397,-314,-46,-49,-217,-218,-308,-316,-47,-48,-317,]),'RBRACKET':([51,52,53,81,95,114,116,136,139,143,144,145,146,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,184,197,208,248,249,250,257,259,263,291,292,293,295,296,297,300,301,302,303,305,306,307,308,323,399,400,406,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,429,431,432,433,434,441,442,448,455,456,458,459,475,477,487,493,497,498,499,503,504,507,508,512,514,519,523,524,542,546,547,553,554,567,569,574,575,582,584,],[-131,-132,-133,-134,-345,-27,-185,-345,-333,-311,-260,-261,-263,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-345,-28,-186,-344,-243,-247,-282,-345,-28,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,439,440,-3,-4,-345,495,496,-313,-264,-265,-266,-267,-268,-269,-270,-271,-272,-273,-274,-275,-276,-277,-278,-279,-280,-281,503,-300,-301,-302,-303,-304,510,511,-345,-345,520,-28,-345,-245,-244,-248,538,539,540,-283,-298,-299,-289,-290,-345,-28,552,555,556,-262,572,573,576,577,-307,-314,585,586,-308,591,]),'PERIOD':([121,139,143,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,256,263,291,292,300,301,302,303,395,396,406,429,431,432,433,434,489,492,494,500,503,504,538,544,545,567,568,569,582,583,591,],[-343,-333,-311,289,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,398,-334,-305,-306,-339,-340,-341,-342,398,-202,-313,-300,-301,-302,-303,-304,398,-203,-205,398,-298,-299,-204,570,-315,-307,398,-314,-308,-316,-317,]),'ARROW':([139,143,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,
175,176,177,178,179,180,181,208,233,263,291,292,300,301,302,303,406,429,431,432,433,434,503,504,567,569,582,],[-333,-311,290,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,-334,-305,-306,-339,-340,-341,-342,-313,-300,-301,-302,-303,-304,-298,-299,-307,-314,-308,]),'CONDOP':([139,143,145,146,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,268,-263,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,-282,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-264,-265,-266,-267,-268,-269,-270,-271,-272,-273,-274,-275,-276,-277,-278,-279,-280,-281,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'DIVIDE':([139,143,145,146,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,270,-263,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,-282,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-264,-265,-266,270,270,270,270,270,270,270,270,270,270,270,270,270,270,270,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'MOD':([139,143,145,146,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,271,-263,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,-282,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-264,-265,-266,271,271,271,271,271,271,271,271,271,271,271,271,271,271,271,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'RSHIFT':([139,143,145,146,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,274,-263,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,-282,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-264,-265,-266,-267,-268,-269,-270,274,274,274,274,274,274,274,274,274,274,274,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'LSHIFT':([139,143,145,146,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,275,-263,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,-282,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-264,-265,-266,-267,-268,-269,-270,275,275,275,275,275,275,
275,275,275,275,275,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'LT':([139,143,145,146,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,276,-263,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,-282,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-264,-265,-266,-267,-268,-269,-270,-271,-272,-273,-274,276,276,276,276,276,276,276,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'LE':([139,143,145,146,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,277,-263,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,-282,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-264,-265,-266,-267,-268,-269,-270,-271,-272,-273,-274,277,277,277,277,277,277,277,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'GE':([139,143,145,146,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,278,-263,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,-282,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-264,-265,-266,-267,-268,-269,-270,-271,-272,-273,-274,278,278,278,278,278,278,278,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'GT':([139,143,145,146,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,279,-263,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,-282,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-264,-265,-266,-267,-268,-269,-270,-271,-272,-273,-274,279,279,279,279,279,279,279,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'EQ':([139,143,145,146,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,280,-263,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,-282,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-264,-265,-266,-267,-268,-269,-270,-271,-272,-273,-274,-275,-276,280,280,280,280,280,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'NE':([139,143,145,146,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,
300,301,302,303,406,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,281,-263,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,-282,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-264,-265,-266,-267,-268,-269,-270,-271,-272,-273,-274,-275,-276,281,281,281,281,281,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'OR':([139,143,145,146,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,283,-263,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,-282,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-264,-265,-266,-267,-268,-269,-270,-271,-272,-273,-274,-275,-276,-277,-278,-279,283,283,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'XOR':([139,143,145,146,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,284,-263,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,-282,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-264,-265,-266,-267,-268,-269,-270,-271,-272,-273,-274,-275,-276,-277,284,-279,284,284,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'LAND':([139,143,145,146,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,285,-263,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,-282,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-264,-265,-266,-267,-268,-269,-270,-271,-272,-273,-274,-275,-276,-277,-278,-279,-280,285,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'LOR':([139,143,145,146,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,286,-263,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,-282,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-264,-265,-266,-267,-268,-269,-270,-271,-272,-273,-274,-275,-276,-277,-278,-279,-280,-281,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'XOREQUAL':([139,143,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,380,-334,
-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'TIMESEQUAL':([139,143,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,381,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'DIVEQUAL':([139,143,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,382,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'MODEQUAL':([139,143,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,383,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'PLUSEQUAL':([139,143,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,384,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'MINUSEQUAL':([139,143,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,385,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'LSHIFTEQUAL':([139,143,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,386,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'RSHIFTEQUAL':([139,143,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,387,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-300,-301,-302,-303,-304
,-283,-298,-299,-289,-290,-307,-314,-308,]),'ANDEQUAL':([139,143,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,388,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'OREQUAL':([139,143,151,152,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,208,233,250,263,291,292,293,295,296,297,300,301,302,303,406,429,431,432,433,434,499,503,504,507,508,567,569,582,],[-333,-311,-282,-284,-297,-320,-309,-310,-312,-321,-322,-323,-324,-325,-326,-327,-328,-329,-330,-331,-332,-335,-336,-337,-338,-344,-320,389,-334,-305,-306,-285,-286,-287,-288,-339,-340,-341,-342,-313,-300,-301,-302,-303,-304,-283,-298,-299,-289,-290,-307,-314,-308,]),'ELLIPSIS':([313,],[443,]),}
+
+_lr_action = {}
+for _k, _v in _lr_action_items.items():
+    for _x,_y in zip(_v[0],_v[1]):
+        if not _x in _lr_action: _lr_action[_x] = {}
+        _lr_action[_x][_k] = _y
+del _lr_action_items
+
+_lr_goto_items = {'translation_unit_or_empty':([0,],[1,]),'translation_unit':([0,],[2,]),'empty':([0,11,12,21,22,23,26,27,30,34,69,70,71,73,95,96,101,128,136,137,182,183,192,209,216,221,242,256,257,258,322,323,351,358,360,369,372,448,449,455,457,459,460,472,484,489,500,512,513,529,530,531,533,565,568,578,580,590,592,],[3,66,83,99,99,99,107,99,114,99,83,107,99,66,114,188,99,220,114,188,307,114,320,343,320,357,357,392,307,114,453,114,453,357,357,357,357,114,188,307,114,307,453,357,357,537,537,307,114,357,357,357,357,357,537,357,357,357,357,]),'external_declaration':([0,2,],[4,64,]),'function_definition':([0,2,],[5,5,]),'declaration':([0,2,11,67,73,128,221,372,],[6,6,68,129,68,223,223,484,]),'pp_directive':([0,2,],[7,7,]),'pppragma_directive':([0,2,124,128,203,204,205,221,242,333,335,358,360,369,472,529,530,531,578,590,592,],[8,8,211,231,211,211,211,231,371,211,211,371,371,482,371,560,371,371,371,371,371,]),'static_assert':([0,2,128,221,242,358,360,369,472,529,530,531,578,590,592,],[10,10,232,232,232,232,232,232,232,232,232,232,232,232,232,]),'id_declarator':([0,2,12,17,26,69,70,82,134,192,194,209,322,468,],[11,11,73,93,111,130,111,93,130,315,130,130,93,130,]),'declaration_specifiers':([0,2,11,67,73,96,128,137,221,313,322,351,372,449,460,],[12,12,69,69,69,192,69,192,69,192,192,192,69,192,192,]),'decl_body':([0,2,11,67,73,128,221,372,],[13,13,13,13,13,13,13,13,]),'direct_id_declarator':([0,2,12,17,20,26,69,70,80,82,134,192,194,209,318,322,452,468,],[19,19,19,19,97,19,19,19,97,19,19,19,19,19,97,19,97,19,]),'pointer':([0,2,12,17,26,69,70,82,113,134,192,194,209,216,322,351,468,],[20,20,80,20,20,80,20,80,196,80,318,80,80,352,452,352,80,]),'type_qualifier':([0,2,11,12,21,22,23,27,30,34,67,69,71,73,95,96,101,115,124,125,126,128,136,137,141,183,184,192,203,204,205,209,213,216,221,238,258,259,294,298,299,304,313,322,323,333,335,351,372,448,449,457,458,460,513,514,],[21,21,21,74,21,21,21,21,116,21,21,74,21,21,116,21,21,197,116,116,116,21,116,21,116,116,197,74,116,116,116,341,197,341,21,116,116,197,116,116,116,116,21,21,116,116,116,21,21,116,21,116,197,21,116,197,]),'storage_class_specifier':([0,2,11,12,21,22,23,27,34,67,69,71,73,96,101,128,137,192,221,313,322,351,372,449,460,],[22,22,22,75,22,22,22,22,22,22,75,22,22,22,22,22,22,75,22,22,22,22,22,22,22,]),'function_specifier'
:([0,2,11,12,21,22,23,27,34,67,69,71,73,96,101,128,137,192,221,313,322,351,372,449,460,],[23,23,23,76,23,23,23,23,23,23,76,23,23,23,23,23,23,76,23,23,23,23,23,23,23,]),'type_specifier_no_typeid':([0,2,11,12,26,67,69,70,73,96,124,125,126,128,137,141,192,193,203,204,205,209,213,216,221,238,294,298,299,304,313,322,333,335,351,372,449,460,],[24,24,24,77,24,24,77,24,24,24,24,24,24,24,24,24,77,24,24,24,24,340,24,340,24,24,24,24,24,24,24,24,24,24,24,24,24,24,]),'type_specifier':([0,2,11,26,67,70,73,96,124,125,126,128,137,141,193,203,204,205,213,221,238,294,298,299,304,313,322,333,335,351,372,449,460,],[25,25,25,104,25,104,25,25,212,212,212,25,25,212,104,212,212,212,348,25,212,212,212,212,212,25,25,212,212,25,25,25,25,]),'declaration_specifiers_no_type':([0,2,11,21,22,23,27,34,67,71,73,96,101,128,137,221,313,322,351,372,449,460,],[26,26,70,100,100,100,100,100,70,100,70,193,100,70,193,70,193,193,193,70,193,193,]),'alignment_specifier':([0,2,11,12,21,22,23,27,34,67,69,71,73,96,101,124,125,126,128,137,141,192,203,204,205,209,216,221,238,294,298,299,304,313,322,333,335,351,372,449,460,],[27,27,27,78,27,27,27,27,27,27,78,27,27,27,27,214,214,214,27,27,214,78,214,214,214,342,342,27,214,214,214,214,214,27,27,214,214,27,27,27,27,]),'typedef_name':([0,2,11,26,67,70,73,96,124,125,126,128,137,141,193,203,204,205,213,221,238,294,298,299,304,313,322,333,335,351,372,449,460,],[31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,]),'enum_specifier':([0,2,11,26,67,70,73,96,124,125,126,128,137,141,193,203,204,205,213,221,238,294,298,299,304,313,322,333,335,351,372,449,460,],[32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,]),'struct_or_union_specifier':([0,2,11,26,67,70,73,96,124,125,126,128,137,141,193,203,204,205,213,221,238,294,298,299,304,313,322,333,335,351,372,449,460,],[33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,]),'atomic_specifier':([0,2,11,21,22,23,26,27,34,67,70,71,73,96,101,124,125,126,128,137,141,193,203,204,205,213,221,238,294,298,299,304,313,322,333,335,351,372,449,460,],[34,34,71,101,101,101,106,101,101,71,106,101,71,34,101,106,106,106,71,34,106,106,106,106,106,106,71,106,106,106,106,106,34,34,106,106,34,71,34,34,]),'struct_or_union':([0,2,11,26,67,70,73,96,124,125,126,128,137,141,193,203,204,205,213,221,238,294,298,299,304,313,322,333,335,351,372,449,460,],[37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,]),'declaration_list_opt':([11,73,],[65,131,]),'declaration_list':([11,73,],[67,67,]),'init_declarator_list_opt':([12,69,],[79,79,]),'init_declarator_list':([12,69,],[84,84,]),'init_declarator':([12,69,134,194,],[85,85,253,326,]),'declarator':([12,69,134,194,209,468,],[86,86,86,86,346,346,]),'typeid_declarator':([12,69,82,134,194,209,468,],[87,87,133,87,87,87,87,]),'direct_typeid_declarator':([12,69,80,82,134,194,209,468,],[88,88,132,88,88,88,88,88,]),'declaration_specifiers_no_type_opt':([21,22,23,27,34,71,101,],[98,102,103,112,117,117,117,]),'id_init_declarator_list_opt':([26,70,],[105,105,]),'id_init_declarator_list':([26,70,],[108,108,]),'id_init_declarator':([26,70,],[110,110,]),'type_qualifier_list_opt':([30,95,136,183,258,323,448,457,513,],[113,182,257,309,401,455,512,521,548,]),'type_qualifier_list':([30,95,124,125,126,136,141,183,203,204,205,238,258,294,298,299,304,323,333,335,448,457,513,],[115,184,213,213,213,259,213,115,213,213,213,213,115,213,213,213,213,458,213,213,514,115,115,]),'bra
ce_open':([36,37,65,118,119,122,123,128,131,135,195,221,238,242,358,360,369,393,405,472,476,506,507,529,530,531,536,578,590,592,],[120,124,128,198,199,203,204,128,128,256,256,128,128,128,128,128,128,256,500,128,500,500,500,128,128,128,256,128,128,128,]),'compound_statement':([65,128,131,221,238,242,358,360,369,472,529,530,531,578,590,592,],[127,227,251,227,363,227,227,227,227,227,227,227,227,227,227,227,]),'unified_string_literal':([92,94,126,128,135,141,153,154,155,156,182,195,221,234,238,242,247,257,266,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,309,310,332,347,358,360,362,365,366,367,369,372,378,393,397,401,402,405,455,459,469,472,476,484,502,505,512,521,522,529,530,531,532,533,536,548,549,565,571,578,580,590,592,],[138,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,407,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,]),'constant_expression':([94,126,234,332,347,397,469,],[142,218,359,464,470,493,527,]),'conditional_expression':([94,126,128,135,141,182,195,221,234,238,242,247,257,268,287,288,294,298,309,310,332,347,358,360,362,365,366,367,369,372,378,393,397,401,402,455,459,469,472,484,502,505,512,521,522,529,530,531,532,533,536,548,549,565,571,578,580,590,592,],[144,144,249,249,249,249,249,249,144,249,249,249,249,249,249,249,249,249,249,249,144,144,249,249,249,249,249,249,249,249,249,249,144,249,249,249,249,144,249,249,542,249,249,249,249,249,249,249,249,249,249,249,249,249,249,249,249,249,249,]),'binary_expression':([94,126,128,135,141,182,195,221,234,238,242,247,257,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,309,310,332,347,358,360,362,365,366,367,369,372,378,393,397,401,402,455,459,469,472,484,502,505,512,521,522,529,530,531,532,533,536,548,549,565,571,578,580,590,592,],[145,145,145,145,145,145,145,145,145,145,145,145,145,145,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,]),'cast_expression':([94,126,128,135,141,155,182,195,221,234,238,242,247,257,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,309,310,332,347,358,360,362,365,366,367,369,372,378,393,397,401,402,405,455,459,469,472,476,484,502,505,512,521,522,529,530,531,532,533,536,548,549,565,571,578,580,590,592,],[146,146,146,146,146,296,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,499,146,146,146,146,499,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,]),'unary_expression':([94,126,128,135,141,153,154,155,156,182,195,221,234,238,242,247,257,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,309,310,332,347,358,360,362,365,366,367,369,372,378,393,397,401,402,405,455,459,469,472,476,484,502,505,512,521,522,529,530,531,532,533,536,548,549,565,571,578,580,590,592,],[151,151,250,250,250,293,295,151,297,250,250,250,151,250,250,250,250,250,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,250,250,250,250,250,250,151,151,250,25
0,250,250,250,250,250,250,250,250,151,250,250,151,250,250,151,250,151,250,151,250,250,250,250,250,250,250,250,250,250,250,250,250,250,250,250,250,250,]),'postfix_expression':([94,126,128,135,141,153,154,155,156,182,195,221,234,238,242,247,257,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,309,310,332,347,358,360,362,365,366,367,369,372,378,393,397,401,402,405,455,459,469,472,476,484,502,505,512,521,522,529,530,531,532,533,536,548,549,565,571,578,580,590,592,],[152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,]),'unary_operator':([94,126,128,135,141,153,154,155,156,182,195,221,234,238,242,247,257,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,309,310,332,347,358,360,362,365,366,367,369,372,378,393,397,401,402,405,455,459,469,472,476,484,502,505,512,521,522,529,530,531,532,533,536,548,549,565,571,578,580,590,592,],[155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,]),'primary_expression':([94,126,128,135,141,153,154,155,156,182,195,221,234,238,242,247,257,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,309,310,332,347,358,360,362,365,366,367,369,372,378,393,397,401,402,405,455,459,469,472,476,484,502,505,512,521,522,529,530,531,532,533,536,548,549,565,571,578,580,590,592,],[158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,]),'identifier':([94,96,126,128,135,137,141,153,154,155,156,182,195,221,234,238,242,247,257,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,309,310,314,332,347,358,360,362,365,366,367,369,372,378,393,397,398,401,402,405,449,455,459,469,472,476,484,502,505,509,512,521,522,529,530,531,532,533,536,548,549,565,570,571,578,580,590,592,],[162,191,162,162,162,191,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,445,162,162,162,162,162,162,162,162,162,162,162,162,162,494,162,162,162,191,162,162,162,162,162,162,162,162,545,162,162,162,162,162,162,162,162,162,162,162,162,583,162,162,162,162,162,]),'constant':([94,126,128,135,141,153,154,155,156,182,195,221,234,238,242,247,257,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,309,310,332,347,358,360,362,365,366,367,369,372,378,393,397,401,402,405,455,459,469,472,476,484,502,505,512,521,522,529,530,531,532,533,536,548,549,565,571,578,580,590,592,],[163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,]),'unified_wstring_literal':([94,126,128,135,141,153,154,155,156,182,195,221,234,238,242,247,257,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,294,298,309,310,332,347,358,360,362,365,366,367,369,372,378,393,397,401,402,405,455,459,469,472,476,484,502,505,512,521,522,529,530,531,532,533,536,548,549,565,571,578,580,590,592,],[164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,]),'parameter_type_list':([96,137,322,351,449,460,],[185,260,454,454,515,454,]),'identifier_list_opt':([96,137,449,],[186,261,516,]),'parameter_list':([96,137,322,351,449,460,],[187,187,187,187,187,187,]),'identifier_list':([96,137,449,],[189,189,189,]),'parameter_declaration':([96,137,313,322,351,449,460,],[190,190,444,190,190,190,190,]),'enumerator_list':([120,198,199,],[200,328,329,]),'enumerator':([120,198,199,331,],[201,201,201,463,]),'struct_declaration_list':([124,203,204,],[205,333,335,]),'brace_close':([124,200,203,204,205,219,328,329,333,335,390,489,541,568,],[206,330,334,336,337,355,461,462,465,466,488,535,567,582,]),'struct_declaration':([124,203,204,205,333,335,],[207,207,207,338,338,338,]),'specifier_qualifier_list':([124,125,126,141,203,204,205,238,294,298,299,304,333,335,],[209,216,216,216,209,209,209,216,216,216,216,216,209,209,]),'type_name':([125,126,141,238,294,298,299,304,],[215,217,264,364,435,436,437,438,]),'block_item_list_opt':([128,],[219,]),'block_item_list':([128,],[221,]),'block_item':([128,221,],[222,356,]),'statement':([128,221,242,358,360,369,472,529,530,531,578,590,592,],[224,224,370,370,370,481,370,559,370,370,370,370,370,]),'labeled_statement':([128,221,242,358,360,369,472,529,530,531,578,590,592,],[225,225,225,225,225,225,225,225,225,225,225,225,225,]),'expression_statement':([128,221,242,358,360,369,472,529,530,531,578,590,592,],[226,226,226,226,226,226,226,226,226,226,226,226,226,]),'selection_statement':([128,221,242,358,360,369,472,529,530,531,578,590,592,],[228,228,228,228,228,228,228,228,228,228,228,228,228,]),'iteration_statement':([128,221,242,358,360,369,472,529,530,531,578,590,592,],[229,229,229,229,229,229,229,229,229,229,229,229,229,]),'jump_statement':([128,221,242,358,360,369,472,529,530,531,578,590,592,],[230,230,230,230,230,230,230,230,230,230,230,230,230,]),'expression_opt':([128,221,242,358,360,369,372,472,484,529,530,531,533,565,578,580,590,592,],[236,236,236,236,236,236,483,236,534,236,236,236,564,581,236,589,236,236,]),'expression':([128,141,221,238,242,247,268,287,294,298,358,360,362,366,367,369,372,472,484,529,530,531,532,533,565,571,578,580,590,592,],[239,265,239,265,239,376,408,427,265,265,239,239,474,478,479,239,239,239,239,239,239,239,563,239,239,584,239,239,239,239,]),'assignment_expression':([128,135,141,182,195,221,238,242,247,257,268,287,288,294,298,309,310,358,360,362,365,366,367,369,372,378,393,401,402,455,459,472,484,505,512,521,522,529,530,531,532,533,536,548,549,565,571,578,580,590,592,],[248,255,248,308,255,248,248,248,248,308,248,248,430,248,248,441,442,248,248,248,477,248,248,248,248,487,255,497,498,308,308,248,248,543,308,553,554,248,248,248,248,248,255,574,575,248,248,248,248,248,248,]),'initializer':([135,195,393,536,],[254,327,490,566,]),'assi
gnment_expression_opt':([182,257,455,459,512,],[305,399,519,523,546,]),'typeid_noparen_declarator':([192,],[316,]),'abstract_declarator_opt':([192,216,],[317,350,]),'direct_typeid_noparen_declarator':([192,318,],[319,446,]),'abstract_declarator':([192,216,322,351,],[321,321,450,450,]),'direct_abstract_declarator':([192,216,318,322,351,352,452,],[325,325,447,325,325,447,447,]),'struct_declarator_list_opt':([209,],[339,]),'struct_declarator_list':([209,],[344,]),'struct_declarator':([209,468,],[345,526,]),'pragmacomp_or_statement':([242,358,360,472,529,530,531,578,590,592,],[368,471,473,528,558,561,562,587,593,594,]),'pppragma_directive_list':([242,358,360,472,529,530,531,578,590,592,],[369,369,369,369,369,369,369,369,369,369,]),'assignment_operator':([250,],[378,]),'initializer_list_opt':([256,],[390,]),'initializer_list':([256,500,],[391,541,]),'designation_opt':([256,489,500,568,],[393,536,393,536,]),'designation':([256,489,500,568,],[394,394,394,394,]),'designator_list':([256,489,500,568,],[395,395,395,395,]),'designator':([256,395,489,500,568,],[396,492,396,396,396,]),'argument_expression_list':([288,],[428,]),'parameter_type_list_opt':([322,351,460,],[451,451,525,]),'offsetof_member_designator':([509,],[544,]),}
+
+_lr_goto = {}
+for _k, _v in _lr_goto_items.items():
+    for _x, _y in zip(_v[0], _v[1]):
+        if not _x in _lr_goto: _lr_goto[_x] = {}
+        _lr_goto[_x][_k] = _y
+del _lr_goto_items
+_lr_productions = [ + ("S' -> translation_unit_or_empty","S'",1,None,None,None), + ('abstract_declarator_opt -> empty','abstract_declarator_opt',1,'p_abstract_declarator_opt','plyparser.py',43), + ('abstract_declarator_opt -> abstract_declarator','abstract_declarator_opt',1,'p_abstract_declarator_opt','plyparser.py',44), + ('assignment_expression_opt -> empty','assignment_expression_opt',1,'p_assignment_expression_opt','plyparser.py',43), + ('assignment_expression_opt -> assignment_expression','assignment_expression_opt',1,'p_assignment_expression_opt','plyparser.py',44), + ('block_item_list_opt -> empty','block_item_list_opt',1,'p_block_item_list_opt','plyparser.py',43), + ('block_item_list_opt -> block_item_list','block_item_list_opt',1,'p_block_item_list_opt','plyparser.py',44), + ('declaration_list_opt -> empty','declaration_list_opt',1,'p_declaration_list_opt','plyparser.py',43), + ('declaration_list_opt -> declaration_list','declaration_list_opt',1,'p_declaration_list_opt','plyparser.py',44), + ('declaration_specifiers_no_type_opt -> empty','declaration_specifiers_no_type_opt',1,'p_declaration_specifiers_no_type_opt','plyparser.py',43), + ('declaration_specifiers_no_type_opt -> declaration_specifiers_no_type','declaration_specifiers_no_type_opt',1,'p_declaration_specifiers_no_type_opt','plyparser.py',44), + ('designation_opt -> empty','designation_opt',1,'p_designation_opt','plyparser.py',43), + ('designation_opt -> designation','designation_opt',1,'p_designation_opt','plyparser.py',44), + ('expression_opt -> empty','expression_opt',1,'p_expression_opt','plyparser.py',43), + ('expression_opt -> expression','expression_opt',1,'p_expression_opt','plyparser.py',44), + ('id_init_declarator_list_opt -> empty','id_init_declarator_list_opt',1,'p_id_init_declarator_list_opt','plyparser.py',43), + ('id_init_declarator_list_opt -> id_init_declarator_list','id_init_declarator_list_opt',1,'p_id_init_declarator_list_opt','plyparser.py',44), + ('identifier_list_opt -> empty','identifier_list_opt',1,'p_identifier_list_opt','plyparser.py',43), + ('identifier_list_opt -> 
identifier_list','identifier_list_opt',1,'p_identifier_list_opt','plyparser.py',44), + ('init_declarator_list_opt -> empty','init_declarator_list_opt',1,'p_init_declarator_list_opt','plyparser.py',43), + ('init_declarator_list_opt -> init_declarator_list','init_declarator_list_opt',1,'p_init_declarator_list_opt','plyparser.py',44), + ('initializer_list_opt -> empty','initializer_list_opt',1,'p_initializer_list_opt','plyparser.py',43), + ('initializer_list_opt -> initializer_list','initializer_list_opt',1,'p_initializer_list_opt','plyparser.py',44), + ('parameter_type_list_opt -> empty','parameter_type_list_opt',1,'p_parameter_type_list_opt','plyparser.py',43), + ('parameter_type_list_opt -> parameter_type_list','parameter_type_list_opt',1,'p_parameter_type_list_opt','plyparser.py',44), + ('struct_declarator_list_opt -> empty','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','plyparser.py',43), + ('struct_declarator_list_opt -> struct_declarator_list','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','plyparser.py',44), + ('type_qualifier_list_opt -> empty','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','plyparser.py',43), + ('type_qualifier_list_opt -> type_qualifier_list','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','plyparser.py',44), + ('direct_id_declarator -> ID','direct_id_declarator',1,'p_direct_id_declarator_1','plyparser.py',126), + ('direct_id_declarator -> LPAREN id_declarator RPAREN','direct_id_declarator',3,'p_direct_id_declarator_2','plyparser.py',126), + ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_id_declarator',5,'p_direct_id_declarator_3','plyparser.py',126), + ('direct_id_declarator -> direct_id_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_id_declarator',6,'p_direct_id_declarator_4','plyparser.py',126), + ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_id_declarator',6,'p_direct_id_declarator_4','plyparser.py',127), + ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_id_declarator',5,'p_direct_id_declarator_5','plyparser.py',126), + ('direct_id_declarator -> direct_id_declarator LPAREN parameter_type_list RPAREN','direct_id_declarator',4,'p_direct_id_declarator_6','plyparser.py',126), + ('direct_id_declarator -> direct_id_declarator LPAREN identifier_list_opt RPAREN','direct_id_declarator',4,'p_direct_id_declarator_6','plyparser.py',127), + ('direct_typeid_declarator -> TYPEID','direct_typeid_declarator',1,'p_direct_typeid_declarator_1','plyparser.py',126), + ('direct_typeid_declarator -> LPAREN typeid_declarator RPAREN','direct_typeid_declarator',3,'p_direct_typeid_declarator_2','plyparser.py',126), + ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_typeid_declarator',5,'p_direct_typeid_declarator_3','plyparser.py',126), + ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_typeid_declarator',6,'p_direct_typeid_declarator_4','plyparser.py',126), + ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_typeid_declarator',6,'p_direct_typeid_declarator_4','plyparser.py',127), + ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET 
type_qualifier_list_opt TIMES RBRACKET','direct_typeid_declarator',5,'p_direct_typeid_declarator_5','plyparser.py',126), + ('direct_typeid_declarator -> direct_typeid_declarator LPAREN parameter_type_list RPAREN','direct_typeid_declarator',4,'p_direct_typeid_declarator_6','plyparser.py',126), + ('direct_typeid_declarator -> direct_typeid_declarator LPAREN identifier_list_opt RPAREN','direct_typeid_declarator',4,'p_direct_typeid_declarator_6','plyparser.py',127), + ('direct_typeid_noparen_declarator -> TYPEID','direct_typeid_noparen_declarator',1,'p_direct_typeid_noparen_declarator_1','plyparser.py',126), + ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_typeid_noparen_declarator',5,'p_direct_typeid_noparen_declarator_3','plyparser.py',126), + ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_typeid_noparen_declarator',6,'p_direct_typeid_noparen_declarator_4','plyparser.py',126), + ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_typeid_noparen_declarator',6,'p_direct_typeid_noparen_declarator_4','plyparser.py',127), + ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_typeid_noparen_declarator',5,'p_direct_typeid_noparen_declarator_5','plyparser.py',126), + ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LPAREN parameter_type_list RPAREN','direct_typeid_noparen_declarator',4,'p_direct_typeid_noparen_declarator_6','plyparser.py',126), + ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LPAREN identifier_list_opt RPAREN','direct_typeid_noparen_declarator',4,'p_direct_typeid_noparen_declarator_6','plyparser.py',127), + ('id_declarator -> direct_id_declarator','id_declarator',1,'p_id_declarator_1','plyparser.py',126), + ('id_declarator -> pointer direct_id_declarator','id_declarator',2,'p_id_declarator_2','plyparser.py',126), + ('typeid_declarator -> direct_typeid_declarator','typeid_declarator',1,'p_typeid_declarator_1','plyparser.py',126), + ('typeid_declarator -> pointer direct_typeid_declarator','typeid_declarator',2,'p_typeid_declarator_2','plyparser.py',126), + ('typeid_noparen_declarator -> direct_typeid_noparen_declarator','typeid_noparen_declarator',1,'p_typeid_noparen_declarator_1','plyparser.py',126), + ('typeid_noparen_declarator -> pointer direct_typeid_noparen_declarator','typeid_noparen_declarator',2,'p_typeid_noparen_declarator_2','plyparser.py',126), + ('translation_unit_or_empty -> translation_unit','translation_unit_or_empty',1,'p_translation_unit_or_empty','c_parser.py',509), + ('translation_unit_or_empty -> empty','translation_unit_or_empty',1,'p_translation_unit_or_empty','c_parser.py',510), + ('translation_unit -> external_declaration','translation_unit',1,'p_translation_unit_1','c_parser.py',518), + ('translation_unit -> translation_unit external_declaration','translation_unit',2,'p_translation_unit_2','c_parser.py',524), + ('external_declaration -> function_definition','external_declaration',1,'p_external_declaration_1','c_parser.py',534), + ('external_declaration -> declaration','external_declaration',1,'p_external_declaration_2','c_parser.py',539), + ('external_declaration -> 
pp_directive','external_declaration',1,'p_external_declaration_3','c_parser.py',544), + ('external_declaration -> pppragma_directive','external_declaration',1,'p_external_declaration_3','c_parser.py',545), + ('external_declaration -> SEMI','external_declaration',1,'p_external_declaration_4','c_parser.py',550), + ('external_declaration -> static_assert','external_declaration',1,'p_external_declaration_5','c_parser.py',555), + ('static_assert -> _STATIC_ASSERT LPAREN constant_expression COMMA unified_string_literal RPAREN','static_assert',6,'p_static_assert_declaration','c_parser.py',560), + ('static_assert -> _STATIC_ASSERT LPAREN constant_expression RPAREN','static_assert',4,'p_static_assert_declaration','c_parser.py',561), + ('pp_directive -> PPHASH','pp_directive',1,'p_pp_directive','c_parser.py',569), + ('pppragma_directive -> PPPRAGMA','pppragma_directive',1,'p_pppragma_directive','c_parser.py',580), + ('pppragma_directive -> PPPRAGMA PPPRAGMASTR','pppragma_directive',2,'p_pppragma_directive','c_parser.py',581), + ('pppragma_directive -> _PRAGMA LPAREN unified_string_literal RPAREN','pppragma_directive',4,'p_pppragma_directive','c_parser.py',582), + ('pppragma_directive_list -> pppragma_directive','pppragma_directive_list',1,'p_pppragma_directive_list','c_parser.py',592), + ('pppragma_directive_list -> pppragma_directive_list pppragma_directive','pppragma_directive_list',2,'p_pppragma_directive_list','c_parser.py',593), + ('function_definition -> id_declarator declaration_list_opt compound_statement','function_definition',3,'p_function_definition_1','c_parser.py',600), + ('function_definition -> declaration_specifiers id_declarator declaration_list_opt compound_statement','function_definition',4,'p_function_definition_2','c_parser.py',618), + ('statement -> labeled_statement','statement',1,'p_statement','c_parser.py',633), + ('statement -> expression_statement','statement',1,'p_statement','c_parser.py',634), + ('statement -> compound_statement','statement',1,'p_statement','c_parser.py',635), + ('statement -> selection_statement','statement',1,'p_statement','c_parser.py',636), + ('statement -> iteration_statement','statement',1,'p_statement','c_parser.py',637), + ('statement -> jump_statement','statement',1,'p_statement','c_parser.py',638), + ('statement -> pppragma_directive','statement',1,'p_statement','c_parser.py',639), + ('statement -> static_assert','statement',1,'p_statement','c_parser.py',640), + ('pragmacomp_or_statement -> pppragma_directive_list statement','pragmacomp_or_statement',2,'p_pragmacomp_or_statement','c_parser.py',688), + ('pragmacomp_or_statement -> statement','pragmacomp_or_statement',1,'p_pragmacomp_or_statement','c_parser.py',689), + ('decl_body -> declaration_specifiers init_declarator_list_opt','decl_body',2,'p_decl_body','c_parser.py',708), + ('decl_body -> declaration_specifiers_no_type id_init_declarator_list_opt','decl_body',2,'p_decl_body','c_parser.py',709), + ('declaration -> decl_body SEMI','declaration',2,'p_declaration','c_parser.py',769), + ('declaration_list -> declaration','declaration_list',1,'p_declaration_list','c_parser.py',778), + ('declaration_list -> declaration_list declaration','declaration_list',2,'p_declaration_list','c_parser.py',779), + ('declaration_specifiers_no_type -> type_qualifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_1','c_parser.py',789), + ('declaration_specifiers_no_type -> storage_class_specifier 
declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_2','c_parser.py',794), + ('declaration_specifiers_no_type -> function_specifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_3','c_parser.py',799), + ('declaration_specifiers_no_type -> atomic_specifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_4','c_parser.py',806), + ('declaration_specifiers_no_type -> alignment_specifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_5','c_parser.py',811), + ('declaration_specifiers -> declaration_specifiers type_qualifier','declaration_specifiers',2,'p_declaration_specifiers_1','c_parser.py',816), + ('declaration_specifiers -> declaration_specifiers storage_class_specifier','declaration_specifiers',2,'p_declaration_specifiers_2','c_parser.py',821), + ('declaration_specifiers -> declaration_specifiers function_specifier','declaration_specifiers',2,'p_declaration_specifiers_3','c_parser.py',826), + ('declaration_specifiers -> declaration_specifiers type_specifier_no_typeid','declaration_specifiers',2,'p_declaration_specifiers_4','c_parser.py',831), + ('declaration_specifiers -> type_specifier','declaration_specifiers',1,'p_declaration_specifiers_5','c_parser.py',836), + ('declaration_specifiers -> declaration_specifiers_no_type type_specifier','declaration_specifiers',2,'p_declaration_specifiers_6','c_parser.py',841), + ('declaration_specifiers -> declaration_specifiers alignment_specifier','declaration_specifiers',2,'p_declaration_specifiers_7','c_parser.py',846), + ('storage_class_specifier -> AUTO','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',851), + ('storage_class_specifier -> REGISTER','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',852), + ('storage_class_specifier -> STATIC','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',853), + ('storage_class_specifier -> EXTERN','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',854), + ('storage_class_specifier -> TYPEDEF','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',855), + ('storage_class_specifier -> _THREAD_LOCAL','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',856), + ('function_specifier -> INLINE','function_specifier',1,'p_function_specifier','c_parser.py',861), + ('function_specifier -> _NORETURN','function_specifier',1,'p_function_specifier','c_parser.py',862), + ('type_specifier_no_typeid -> VOID','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',867), + ('type_specifier_no_typeid -> _BOOL','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',868), + ('type_specifier_no_typeid -> CHAR','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',869), + ('type_specifier_no_typeid -> SHORT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',870), + ('type_specifier_no_typeid -> INT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',871), + ('type_specifier_no_typeid -> LONG','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',872), + ('type_specifier_no_typeid -> FLOAT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',873), + ('type_specifier_no_typeid -> DOUBLE','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',874), + 
('type_specifier_no_typeid -> _COMPLEX','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',875), + ('type_specifier_no_typeid -> SIGNED','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',876), + ('type_specifier_no_typeid -> UNSIGNED','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',877), + ('type_specifier_no_typeid -> __INT128','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',878), + ('type_specifier -> typedef_name','type_specifier',1,'p_type_specifier','c_parser.py',883), + ('type_specifier -> enum_specifier','type_specifier',1,'p_type_specifier','c_parser.py',884), + ('type_specifier -> struct_or_union_specifier','type_specifier',1,'p_type_specifier','c_parser.py',885), + ('type_specifier -> type_specifier_no_typeid','type_specifier',1,'p_type_specifier','c_parser.py',886), + ('type_specifier -> atomic_specifier','type_specifier',1,'p_type_specifier','c_parser.py',887), + ('atomic_specifier -> _ATOMIC LPAREN type_name RPAREN','atomic_specifier',4,'p_atomic_specifier','c_parser.py',893), + ('type_qualifier -> CONST','type_qualifier',1,'p_type_qualifier','c_parser.py',900), + ('type_qualifier -> RESTRICT','type_qualifier',1,'p_type_qualifier','c_parser.py',901), + ('type_qualifier -> VOLATILE','type_qualifier',1,'p_type_qualifier','c_parser.py',902), + ('type_qualifier -> _ATOMIC','type_qualifier',1,'p_type_qualifier','c_parser.py',903), + ('init_declarator_list -> init_declarator','init_declarator_list',1,'p_init_declarator_list','c_parser.py',908), + ('init_declarator_list -> init_declarator_list COMMA init_declarator','init_declarator_list',3,'p_init_declarator_list','c_parser.py',909), + ('init_declarator -> declarator','init_declarator',1,'p_init_declarator','c_parser.py',917), + ('init_declarator -> declarator EQUALS initializer','init_declarator',3,'p_init_declarator','c_parser.py',918), + ('id_init_declarator_list -> id_init_declarator','id_init_declarator_list',1,'p_id_init_declarator_list','c_parser.py',923), + ('id_init_declarator_list -> id_init_declarator_list COMMA init_declarator','id_init_declarator_list',3,'p_id_init_declarator_list','c_parser.py',924), + ('id_init_declarator -> id_declarator','id_init_declarator',1,'p_id_init_declarator','c_parser.py',929), + ('id_init_declarator -> id_declarator EQUALS initializer','id_init_declarator',3,'p_id_init_declarator','c_parser.py',930), + ('specifier_qualifier_list -> specifier_qualifier_list type_specifier_no_typeid','specifier_qualifier_list',2,'p_specifier_qualifier_list_1','c_parser.py',937), + ('specifier_qualifier_list -> specifier_qualifier_list type_qualifier','specifier_qualifier_list',2,'p_specifier_qualifier_list_2','c_parser.py',942), + ('specifier_qualifier_list -> type_specifier','specifier_qualifier_list',1,'p_specifier_qualifier_list_3','c_parser.py',947), + ('specifier_qualifier_list -> type_qualifier_list type_specifier','specifier_qualifier_list',2,'p_specifier_qualifier_list_4','c_parser.py',952), + ('specifier_qualifier_list -> alignment_specifier','specifier_qualifier_list',1,'p_specifier_qualifier_list_5','c_parser.py',957), + ('specifier_qualifier_list -> specifier_qualifier_list alignment_specifier','specifier_qualifier_list',2,'p_specifier_qualifier_list_6','c_parser.py',962), + ('struct_or_union_specifier -> struct_or_union ID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','c_parser.py',970), + ('struct_or_union_specifier -> struct_or_union 
TYPEID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','c_parser.py',971), + ('struct_or_union_specifier -> struct_or_union brace_open struct_declaration_list brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_2','c_parser.py',981), + ('struct_or_union_specifier -> struct_or_union brace_open brace_close','struct_or_union_specifier',3,'p_struct_or_union_specifier_2','c_parser.py',982), + ('struct_or_union_specifier -> struct_or_union ID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','c_parser.py',999), + ('struct_or_union_specifier -> struct_or_union ID brace_open brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_3','c_parser.py',1000), + ('struct_or_union_specifier -> struct_or_union TYPEID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','c_parser.py',1001), + ('struct_or_union_specifier -> struct_or_union TYPEID brace_open brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_3','c_parser.py',1002), + ('struct_or_union -> STRUCT','struct_or_union',1,'p_struct_or_union','c_parser.py',1018), + ('struct_or_union -> UNION','struct_or_union',1,'p_struct_or_union','c_parser.py',1019), + ('struct_declaration_list -> struct_declaration','struct_declaration_list',1,'p_struct_declaration_list','c_parser.py',1026), + ('struct_declaration_list -> struct_declaration_list struct_declaration','struct_declaration_list',2,'p_struct_declaration_list','c_parser.py',1027), + ('struct_declaration -> specifier_qualifier_list struct_declarator_list_opt SEMI','struct_declaration',3,'p_struct_declaration_1','c_parser.py',1035), + ('struct_declaration -> SEMI','struct_declaration',1,'p_struct_declaration_2','c_parser.py',1073), + ('struct_declaration -> pppragma_directive','struct_declaration',1,'p_struct_declaration_3','c_parser.py',1078), + ('struct_declarator_list -> struct_declarator','struct_declarator_list',1,'p_struct_declarator_list','c_parser.py',1083), + ('struct_declarator_list -> struct_declarator_list COMMA struct_declarator','struct_declarator_list',3,'p_struct_declarator_list','c_parser.py',1084), + ('struct_declarator -> declarator','struct_declarator',1,'p_struct_declarator_1','c_parser.py',1092), + ('struct_declarator -> declarator COLON constant_expression','struct_declarator',3,'p_struct_declarator_2','c_parser.py',1097), + ('struct_declarator -> COLON constant_expression','struct_declarator',2,'p_struct_declarator_2','c_parser.py',1098), + ('enum_specifier -> ENUM ID','enum_specifier',2,'p_enum_specifier_1','c_parser.py',1106), + ('enum_specifier -> ENUM TYPEID','enum_specifier',2,'p_enum_specifier_1','c_parser.py',1107), + ('enum_specifier -> ENUM brace_open enumerator_list brace_close','enum_specifier',4,'p_enum_specifier_2','c_parser.py',1112), + ('enum_specifier -> ENUM ID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','c_parser.py',1117), + ('enum_specifier -> ENUM TYPEID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','c_parser.py',1118), + ('enumerator_list -> enumerator','enumerator_list',1,'p_enumerator_list','c_parser.py',1123), + ('enumerator_list -> enumerator_list COMMA','enumerator_list',2,'p_enumerator_list','c_parser.py',1124), + ('enumerator_list -> enumerator_list COMMA enumerator','enumerator_list',3,'p_enumerator_list','c_parser.py',1125), + ('alignment_specifier -> _ALIGNAS LPAREN type_name 
RPAREN','alignment_specifier',4,'p_alignment_specifier','c_parser.py',1136), + ('alignment_specifier -> _ALIGNAS LPAREN constant_expression RPAREN','alignment_specifier',4,'p_alignment_specifier','c_parser.py',1137), + ('enumerator -> ID','enumerator',1,'p_enumerator','c_parser.py',1142), + ('enumerator -> ID EQUALS constant_expression','enumerator',3,'p_enumerator','c_parser.py',1143), + ('declarator -> id_declarator','declarator',1,'p_declarator','c_parser.py',1158), + ('declarator -> typeid_declarator','declarator',1,'p_declarator','c_parser.py',1159), + ('pointer -> TIMES type_qualifier_list_opt','pointer',2,'p_pointer','c_parser.py',1271), + ('pointer -> TIMES type_qualifier_list_opt pointer','pointer',3,'p_pointer','c_parser.py',1272), + ('type_qualifier_list -> type_qualifier','type_qualifier_list',1,'p_type_qualifier_list','c_parser.py',1301), + ('type_qualifier_list -> type_qualifier_list type_qualifier','type_qualifier_list',2,'p_type_qualifier_list','c_parser.py',1302), + ('parameter_type_list -> parameter_list','parameter_type_list',1,'p_parameter_type_list','c_parser.py',1307), + ('parameter_type_list -> parameter_list COMMA ELLIPSIS','parameter_type_list',3,'p_parameter_type_list','c_parser.py',1308), + ('parameter_list -> parameter_declaration','parameter_list',1,'p_parameter_list','c_parser.py',1316), + ('parameter_list -> parameter_list COMMA parameter_declaration','parameter_list',3,'p_parameter_list','c_parser.py',1317), + ('parameter_declaration -> declaration_specifiers id_declarator','parameter_declaration',2,'p_parameter_declaration_1','c_parser.py',1336), + ('parameter_declaration -> declaration_specifiers typeid_noparen_declarator','parameter_declaration',2,'p_parameter_declaration_1','c_parser.py',1337), + ('parameter_declaration -> declaration_specifiers abstract_declarator_opt','parameter_declaration',2,'p_parameter_declaration_2','c_parser.py',1348), + ('identifier_list -> identifier','identifier_list',1,'p_identifier_list','c_parser.py',1380), + ('identifier_list -> identifier_list COMMA identifier','identifier_list',3,'p_identifier_list','c_parser.py',1381), + ('initializer -> assignment_expression','initializer',1,'p_initializer_1','c_parser.py',1390), + ('initializer -> brace_open initializer_list_opt brace_close','initializer',3,'p_initializer_2','c_parser.py',1395), + ('initializer -> brace_open initializer_list COMMA brace_close','initializer',4,'p_initializer_2','c_parser.py',1396), + ('initializer_list -> designation_opt initializer','initializer_list',2,'p_initializer_list','c_parser.py',1404), + ('initializer_list -> initializer_list COMMA designation_opt initializer','initializer_list',4,'p_initializer_list','c_parser.py',1405), + ('designation -> designator_list EQUALS','designation',2,'p_designation','c_parser.py',1416), + ('designator_list -> designator','designator_list',1,'p_designator_list','c_parser.py',1424), + ('designator_list -> designator_list designator','designator_list',2,'p_designator_list','c_parser.py',1425), + ('designator -> LBRACKET constant_expression RBRACKET','designator',3,'p_designator','c_parser.py',1430), + ('designator -> PERIOD identifier','designator',2,'p_designator','c_parser.py',1431), + ('type_name -> specifier_qualifier_list abstract_declarator_opt','type_name',2,'p_type_name','c_parser.py',1436), + ('abstract_declarator -> pointer','abstract_declarator',1,'p_abstract_declarator_1','c_parser.py',1448), + ('abstract_declarator -> pointer 
direct_abstract_declarator','abstract_declarator',2,'p_abstract_declarator_2','c_parser.py',1456), + ('abstract_declarator -> direct_abstract_declarator','abstract_declarator',1,'p_abstract_declarator_3','c_parser.py',1461), + ('direct_abstract_declarator -> LPAREN abstract_declarator RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_1','c_parser.py',1471), + ('direct_abstract_declarator -> direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_2','c_parser.py',1475), + ('direct_abstract_declarator -> LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_3','c_parser.py',1486), + ('direct_abstract_declarator -> direct_abstract_declarator LBRACKET TIMES RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_4','c_parser.py',1496), + ('direct_abstract_declarator -> LBRACKET TIMES RBRACKET','direct_abstract_declarator',3,'p_direct_abstract_declarator_5','c_parser.py',1507), + ('direct_abstract_declarator -> direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',4,'p_direct_abstract_declarator_6','c_parser.py',1516), + ('direct_abstract_declarator -> LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_7','c_parser.py',1526), + ('direct_abstract_declarator -> LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_abstract_declarator',5,'p_direct_abstract_declarator_8','c_parser.py',1534), + ('direct_abstract_declarator -> LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_abstract_declarator',5,'p_direct_abstract_declarator_8','c_parser.py',1535), + ('block_item -> declaration','block_item',1,'p_block_item','c_parser.py',1551), + ('block_item -> statement','block_item',1,'p_block_item','c_parser.py',1552), + ('block_item_list -> block_item','block_item_list',1,'p_block_item_list','c_parser.py',1559), + ('block_item_list -> block_item_list block_item','block_item_list',2,'p_block_item_list','c_parser.py',1560), + ('compound_statement -> brace_open block_item_list_opt brace_close','compound_statement',3,'p_compound_statement_1','c_parser.py',1566), + ('labeled_statement -> ID COLON pragmacomp_or_statement','labeled_statement',3,'p_labeled_statement_1','c_parser.py',1572), + ('labeled_statement -> CASE constant_expression COLON pragmacomp_or_statement','labeled_statement',4,'p_labeled_statement_2','c_parser.py',1576), + ('labeled_statement -> DEFAULT COLON pragmacomp_or_statement','labeled_statement',3,'p_labeled_statement_3','c_parser.py',1580), + ('labeled_statement -> ID COLON','labeled_statement',2,'p_labeled_statement_4','c_parser.py',1584), + ('labeled_statement -> CASE constant_expression COLON','labeled_statement',3,'p_labeled_statement_5','c_parser.py',1588), + ('labeled_statement -> DEFAULT COLON','labeled_statement',2,'p_labeled_statement_6','c_parser.py',1592), + ('selection_statement -> IF LPAREN expression RPAREN pragmacomp_or_statement','selection_statement',5,'p_selection_statement_1','c_parser.py',1596), + ('selection_statement -> IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement','selection_statement',7,'p_selection_statement_2','c_parser.py',1600), + ('selection_statement -> SWITCH LPAREN expression RPAREN pragmacomp_or_statement','selection_statement',5,'p_selection_statement_3','c_parser.py',1604), + ('iteration_statement -> WHILE LPAREN 
expression RPAREN pragmacomp_or_statement','iteration_statement',5,'p_iteration_statement_1','c_parser.py',1609), + ('iteration_statement -> DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI','iteration_statement',7,'p_iteration_statement_2','c_parser.py',1613), + ('iteration_statement -> FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement','iteration_statement',9,'p_iteration_statement_3','c_parser.py',1617), + ('iteration_statement -> FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement','iteration_statement',8,'p_iteration_statement_4','c_parser.py',1621), + ('jump_statement -> GOTO ID SEMI','jump_statement',3,'p_jump_statement_1','c_parser.py',1626), + ('jump_statement -> BREAK SEMI','jump_statement',2,'p_jump_statement_2','c_parser.py',1630), + ('jump_statement -> CONTINUE SEMI','jump_statement',2,'p_jump_statement_3','c_parser.py',1634), + ('jump_statement -> RETURN expression SEMI','jump_statement',3,'p_jump_statement_4','c_parser.py',1638), + ('jump_statement -> RETURN SEMI','jump_statement',2,'p_jump_statement_4','c_parser.py',1639), + ('expression_statement -> expression_opt SEMI','expression_statement',2,'p_expression_statement','c_parser.py',1644), + ('expression -> assignment_expression','expression',1,'p_expression','c_parser.py',1651), + ('expression -> expression COMMA assignment_expression','expression',3,'p_expression','c_parser.py',1652), + ('assignment_expression -> LPAREN compound_statement RPAREN','assignment_expression',3,'p_parenthesized_compound_expression','c_parser.py',1664), + ('typedef_name -> TYPEID','typedef_name',1,'p_typedef_name','c_parser.py',1668), + ('assignment_expression -> conditional_expression','assignment_expression',1,'p_assignment_expression','c_parser.py',1672), + ('assignment_expression -> unary_expression assignment_operator assignment_expression','assignment_expression',3,'p_assignment_expression','c_parser.py',1673), + ('assignment_operator -> EQUALS','assignment_operator',1,'p_assignment_operator','c_parser.py',1686), + ('assignment_operator -> XOREQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1687), + ('assignment_operator -> TIMESEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1688), + ('assignment_operator -> DIVEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1689), + ('assignment_operator -> MODEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1690), + ('assignment_operator -> PLUSEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1691), + ('assignment_operator -> MINUSEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1692), + ('assignment_operator -> LSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1693), + ('assignment_operator -> RSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1694), + ('assignment_operator -> ANDEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1695), + ('assignment_operator -> OREQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1696), + ('constant_expression -> conditional_expression','constant_expression',1,'p_constant_expression','c_parser.py',1701), + ('conditional_expression -> binary_expression','conditional_expression',1,'p_conditional_expression','c_parser.py',1705), + ('conditional_expression -> binary_expression CONDOP expression COLON 
conditional_expression','conditional_expression',5,'p_conditional_expression','c_parser.py',1706), + ('binary_expression -> cast_expression','binary_expression',1,'p_binary_expression','c_parser.py',1714), + ('binary_expression -> binary_expression TIMES binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1715), + ('binary_expression -> binary_expression DIVIDE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1716), + ('binary_expression -> binary_expression MOD binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1717), + ('binary_expression -> binary_expression PLUS binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1718), + ('binary_expression -> binary_expression MINUS binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1719), + ('binary_expression -> binary_expression RSHIFT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1720), + ('binary_expression -> binary_expression LSHIFT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1721), + ('binary_expression -> binary_expression LT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1722), + ('binary_expression -> binary_expression LE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1723), + ('binary_expression -> binary_expression GE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1724), + ('binary_expression -> binary_expression GT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1725), + ('binary_expression -> binary_expression EQ binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1726), + ('binary_expression -> binary_expression NE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1727), + ('binary_expression -> binary_expression AND binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1728), + ('binary_expression -> binary_expression OR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1729), + ('binary_expression -> binary_expression XOR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1730), + ('binary_expression -> binary_expression LAND binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1731), + ('binary_expression -> binary_expression LOR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1732), + ('cast_expression -> unary_expression','cast_expression',1,'p_cast_expression_1','c_parser.py',1740), + ('cast_expression -> LPAREN type_name RPAREN cast_expression','cast_expression',4,'p_cast_expression_2','c_parser.py',1744), + ('unary_expression -> postfix_expression','unary_expression',1,'p_unary_expression_1','c_parser.py',1748), + ('unary_expression -> PLUSPLUS unary_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1752), + ('unary_expression -> MINUSMINUS unary_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1753), + ('unary_expression -> unary_operator cast_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1754), + ('unary_expression -> SIZEOF unary_expression','unary_expression',2,'p_unary_expression_3','c_parser.py',1759), + ('unary_expression -> SIZEOF LPAREN type_name RPAREN','unary_expression',4,'p_unary_expression_3','c_parser.py',1760), + ('unary_expression -> _ALIGNOF LPAREN type_name 
RPAREN','unary_expression',4,'p_unary_expression_3','c_parser.py',1761), + ('unary_operator -> AND','unary_operator',1,'p_unary_operator','c_parser.py',1769), + ('unary_operator -> TIMES','unary_operator',1,'p_unary_operator','c_parser.py',1770), + ('unary_operator -> PLUS','unary_operator',1,'p_unary_operator','c_parser.py',1771), + ('unary_operator -> MINUS','unary_operator',1,'p_unary_operator','c_parser.py',1772), + ('unary_operator -> NOT','unary_operator',1,'p_unary_operator','c_parser.py',1773), + ('unary_operator -> LNOT','unary_operator',1,'p_unary_operator','c_parser.py',1774), + ('postfix_expression -> primary_expression','postfix_expression',1,'p_postfix_expression_1','c_parser.py',1779), + ('postfix_expression -> postfix_expression LBRACKET expression RBRACKET','postfix_expression',4,'p_postfix_expression_2','c_parser.py',1783), + ('postfix_expression -> postfix_expression LPAREN argument_expression_list RPAREN','postfix_expression',4,'p_postfix_expression_3','c_parser.py',1787), + ('postfix_expression -> postfix_expression LPAREN RPAREN','postfix_expression',3,'p_postfix_expression_3','c_parser.py',1788), + ('postfix_expression -> postfix_expression PERIOD ID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1793), + ('postfix_expression -> postfix_expression PERIOD TYPEID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1794), + ('postfix_expression -> postfix_expression ARROW ID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1795), + ('postfix_expression -> postfix_expression ARROW TYPEID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1796), + ('postfix_expression -> postfix_expression PLUSPLUS','postfix_expression',2,'p_postfix_expression_5','c_parser.py',1802), + ('postfix_expression -> postfix_expression MINUSMINUS','postfix_expression',2,'p_postfix_expression_5','c_parser.py',1803), + ('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list brace_close','postfix_expression',6,'p_postfix_expression_6','c_parser.py',1808), + ('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close','postfix_expression',7,'p_postfix_expression_6','c_parser.py',1809), + ('primary_expression -> identifier','primary_expression',1,'p_primary_expression_1','c_parser.py',1814), + ('primary_expression -> constant','primary_expression',1,'p_primary_expression_2','c_parser.py',1818), + ('primary_expression -> unified_string_literal','primary_expression',1,'p_primary_expression_3','c_parser.py',1822), + ('primary_expression -> unified_wstring_literal','primary_expression',1,'p_primary_expression_3','c_parser.py',1823), + ('primary_expression -> LPAREN expression RPAREN','primary_expression',3,'p_primary_expression_4','c_parser.py',1828), + ('primary_expression -> OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN','primary_expression',6,'p_primary_expression_5','c_parser.py',1832), + ('offsetof_member_designator -> identifier','offsetof_member_designator',1,'p_offsetof_member_designator','c_parser.py',1840), + ('offsetof_member_designator -> offsetof_member_designator PERIOD identifier','offsetof_member_designator',3,'p_offsetof_member_designator','c_parser.py',1841), + ('offsetof_member_designator -> offsetof_member_designator LBRACKET expression RBRACKET','offsetof_member_designator',4,'p_offsetof_member_designator','c_parser.py',1842), + ('argument_expression_list -> assignment_expression','argument_expression_list',1,'p_argument_expression_list','c_parser.py',1854), + 
('argument_expression_list -> argument_expression_list COMMA assignment_expression','argument_expression_list',3,'p_argument_expression_list','c_parser.py',1855), + ('identifier -> ID','identifier',1,'p_identifier','c_parser.py',1864), + ('constant -> INT_CONST_DEC','constant',1,'p_constant_1','c_parser.py',1868), + ('constant -> INT_CONST_OCT','constant',1,'p_constant_1','c_parser.py',1869), + ('constant -> INT_CONST_HEX','constant',1,'p_constant_1','c_parser.py',1870), + ('constant -> INT_CONST_BIN','constant',1,'p_constant_1','c_parser.py',1871), + ('constant -> INT_CONST_CHAR','constant',1,'p_constant_1','c_parser.py',1872), + ('constant -> FLOAT_CONST','constant',1,'p_constant_2','c_parser.py',1891), + ('constant -> HEX_FLOAT_CONST','constant',1,'p_constant_2','c_parser.py',1892), + ('constant -> CHAR_CONST','constant',1,'p_constant_3','c_parser.py',1905), + ('constant -> WCHAR_CONST','constant',1,'p_constant_3','c_parser.py',1906), + ('constant -> U8CHAR_CONST','constant',1,'p_constant_3','c_parser.py',1907), + ('constant -> U16CHAR_CONST','constant',1,'p_constant_3','c_parser.py',1908), + ('constant -> U32CHAR_CONST','constant',1,'p_constant_3','c_parser.py',1909), + ('unified_string_literal -> STRING_LITERAL','unified_string_literal',1,'p_unified_string_literal','c_parser.py',1920), + ('unified_string_literal -> unified_string_literal STRING_LITERAL','unified_string_literal',2,'p_unified_string_literal','c_parser.py',1921), + ('unified_wstring_literal -> WSTRING_LITERAL','unified_wstring_literal',1,'p_unified_wstring_literal','c_parser.py',1931), + ('unified_wstring_literal -> U8STRING_LITERAL','unified_wstring_literal',1,'p_unified_wstring_literal','c_parser.py',1932), + ('unified_wstring_literal -> U16STRING_LITERAL','unified_wstring_literal',1,'p_unified_wstring_literal','c_parser.py',1933), + ('unified_wstring_literal -> U32STRING_LITERAL','unified_wstring_literal',1,'p_unified_wstring_literal','c_parser.py',1934), + ('unified_wstring_literal -> unified_wstring_literal WSTRING_LITERAL','unified_wstring_literal',2,'p_unified_wstring_literal','c_parser.py',1935), + ('unified_wstring_literal -> unified_wstring_literal U8STRING_LITERAL','unified_wstring_literal',2,'p_unified_wstring_literal','c_parser.py',1936), + ('unified_wstring_literal -> unified_wstring_literal U16STRING_LITERAL','unified_wstring_literal',2,'p_unified_wstring_literal','c_parser.py',1937), + ('unified_wstring_literal -> unified_wstring_literal U32STRING_LITERAL','unified_wstring_literal',2,'p_unified_wstring_literal','c_parser.py',1938), + ('brace_open -> LBRACE','brace_open',1,'p_brace_open','c_parser.py',1948), + ('brace_close -> RBRACE','brace_close',1,'p_brace_close','c_parser.py',1954), + ('empty -> ','empty',0,'p_empty','c_parser.py',1960), +] diff --git a/venv/lib/python3.12/site-packages/pyopenssl-25.3.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/pyopenssl-25.3.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pyopenssl-25.3.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/pyopenssl-25.3.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/pyopenssl-25.3.0.dist-info/METADATA new file mode 100644 index 00000000..5c46dbe1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pyopenssl-25.3.0.dist-info/METADATA @@ -0,0 +1,525 @@ +Metadata-Version: 2.4 +Name: pyOpenSSL +Version: 25.3.0 +Summary: Python wrapper module around the OpenSSL library +Home-page: https://pyopenssl.org/ 
+Author: The pyOpenSSL developers +Author-email: cryptography-dev@python.org +License: Apache License, Version 2.0 +Project-URL: Source, https://github.com/pyca/pyopenssl +Classifier: Development Status :: 6 - Mature +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Security :: Cryptography +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: System :: Networking +Requires-Python: >=3.7 +License-File: LICENSE +Requires-Dist: cryptography<47,>=45.0.7 +Requires-Dist: typing-extensions>=4.9; python_version < "3.13" and python_version >= "3.8" +Provides-Extra: test +Requires-Dist: pytest-rerunfailures; extra == "test" +Requires-Dist: pretend; extra == "test" +Requires-Dist: pytest>=3.0.1; extra == "test" +Provides-Extra: docs +Requires-Dist: sphinx!=5.2.0,!=5.2.0.post0,!=7.2.5; extra == "docs" +Requires-Dist: sphinx_rtd_theme; extra == "docs" +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: home-page +Dynamic: license +Dynamic: license-file +Dynamic: project-url +Dynamic: provides-extra +Dynamic: requires-dist +Dynamic: requires-python +Dynamic: summary + +======================================================== +pyOpenSSL -- A Python wrapper around the OpenSSL library +======================================================== + +.. image:: https://readthedocs.org/projects/pyopenssl/badge/?version=stable + :target: https://pyopenssl.org/en/stable/ + :alt: Stable Docs + +.. image:: https://github.com/pyca/pyopenssl/workflows/CI/badge.svg?branch=main + :target: https://github.com/pyca/pyopenssl/actions?query=workflow%3ACI+branch%3Amain + +**Note:** The Python Cryptographic Authority **strongly suggests** the use of `pyca/cryptography`_ +where possible. If you are using pyOpenSSL for anything other than making a TLS connection +**you should move to cryptography and drop your pyOpenSSL dependency**. + +High-level wrapper around a subset of the OpenSSL library. Includes + +* ``SSL.Connection`` objects, wrapping the methods of Python's portable sockets +* Callbacks written in Python +* Extensive error-handling mechanism, mirroring OpenSSL's error codes + +... and much more. + +You can find more information in the documentation_. +Development takes place on GitHub_. + + +Discussion +========== + +If you run into bugs, you can file them in our `issue tracker`_. + +We maintain a cryptography-dev_ mailing list for both user and development discussions. + +You can also join ``#pyca`` on ``irc.libera.chat`` to ask questions or get involved. + + +.. _documentation: https://pyopenssl.org/ +.. _`issue tracker`: https://github.com/pyca/pyopenssl/issues +.. _cryptography-dev: https://mail.python.org/mailman/listinfo/cryptography-dev +.. 
_GitHub: https://github.com/pyca/pyopenssl +.. _`pyca/cryptography`: https://github.com/pyca/cryptography + + +Release Information +=================== + +25.4.0 (UNRELEASED) +------------------- + +Backward-incompatible changes: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Deprecations: +^^^^^^^^^^^^^ + +Changes: +^^^^^^^^ + +25.3.0 (2025-09-16) +------------------- + +Backward-incompatible changes: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Deprecations: +^^^^^^^^^^^^^ + +Changes: +^^^^^^^^ + +- Maximum supported ``cryptography`` version is now 46.x. + + +25.2.0 (2025-09-14) +------------------- + +Backward-incompatible changes: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- The minimum ``cryptography`` version is now 45.0.7. + +Deprecations: +^^^^^^^^^^^^^ + +Changes: +^^^^^^^^ + +- pyOpenSSL now sets ``SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER`` on connections by default, matching CPython's behavior. +- Added ``OpenSSL.SSL.Context.clear_mode``. +- Added ``OpenSSL.SSL.Context.set_tls13_ciphersuites`` to set the allowed TLS 1.3 ciphers. +- Added ``OpenSSL.SSL.Connection.set_info_callback``. + +25.1.0 (2025-05-17) +------------------- + +Backward-incompatible changes: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Deprecations: +^^^^^^^^^^^^^ + +- Attempting to use any methods that mutate an ``OpenSSL.SSL.Context`` after it + has been used to create an ``OpenSSL.SSL.Connection`` will emit a warning. In + a future release, this will raise an exception. + +Changes: +^^^^^^^^ + +* ``cryptography`` maximum version has been increased to 45.0.x. + + +25.0.0 (2025-01-12) +------------------- + +Backward-incompatible changes: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Deprecations: +^^^^^^^^^^^^^ + +Changes: +^^^^^^^^ + +- Corrected type annotations on ``Context.set_alpn_select_callback``, ``Context.set_session_cache_mode``, ``Context.set_options``, ``Context.set_mode``, ``X509.subject_name_hash``, and ``X509Store.load_locations``. +- Deprecated APIs are now marked using ``warnings.deprecated``. ``mypy`` will emit deprecation notices for them when used with ``--enable-error-code deprecated``. + +24.3.0 (2024-11-27) +------------------- + +Backward-incompatible changes: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- Removed the deprecated ``OpenSSL.crypto.CRL``, ``OpenSSL.crypto.Revoked``, ``OpenSSL.crypto.dump_crl``, and ``OpenSSL.crypto.load_crl``. ``cryptography.x509``'s CRL functionality should be used instead. +- Removed the deprecated ``OpenSSL.crypto.sign`` and ``OpenSSL.crypto.verify``. ``cryptography.hazmat.primitives.asymmetric``'s signature APIs should be used instead. + +Deprecations: +^^^^^^^^^^^^^ + +- Deprecated ``OpenSSL.rand`` - callers should use ``os.urandom()`` instead. +- Deprecated ``add_extensions`` and ``get_extensions`` on ``OpenSSL.crypto.X509Req`` and ``OpenSSL.crypto.X509``. These should have been deprecated at the same time ``X509Extension`` was. Users should use pyca/cryptography's X.509 APIs instead. +- Deprecated ``OpenSSL.crypto.get_elliptic_curves`` and ``OpenSSL.crypto.get_elliptic_curve``, as well as passing the result of them to ``OpenSSL.SSL.Context.set_tmp_ecdh``; users should instead pass curves from ``cryptography``. +- Deprecated passing ``X509`` objects to ``OpenSSL.SSL.Context.use_certificate``, ``OpenSSL.SSL.Connection.use_certificate``, ``OpenSSL.SSL.Context.add_extra_chain_cert``, and ``OpenSSL.SSL.Context.add_client_ca``; users should instead pass ``cryptography.x509.Certificate`` instances. This is in preparation for deprecating pyOpenSSL's ``X509`` entirely.
+- Deprecated passing ``PKey`` objects to ``OpenSSL.SSL.Context.use_privatekey`` and ``OpenSSL.SSL.Connection.use_privatekey``; users should instead pass ``cryptography`` private key instances. This is in preparation for deprecating pyOpenSSL's ``PKey`` entirely. + +Changes: +^^^^^^^^ + +* ``cryptography`` maximum version has been increased to 44.0.x. +* ``OpenSSL.SSL.Connection.get_certificate``, ``OpenSSL.SSL.Connection.get_peer_certificate``, ``OpenSSL.SSL.Connection.get_peer_cert_chain``, and ``OpenSSL.SSL.Connection.get_verified_chain`` now take an ``as_cryptography`` keyword-argument. When ``True`` is passed, ``cryptography.x509.Certificate`` objects are returned, instead of ``OpenSSL.crypto.X509``. In the future, passing ``False`` (the default) will be deprecated. + + +24.2.1 (2024-07-20) +------------------- + +Backward-incompatible changes: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Deprecations: +^^^^^^^^^^^^^ + +Changes: +^^^^^^^^ + +- Fixed changelog to remove Sphinx-specific reStructuredText strings. + + +24.2.0 (2024-07-20) +------------------- + +Backward-incompatible changes: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Deprecations: +^^^^^^^^^^^^^ + +- Deprecated ``OpenSSL.crypto.X509Req``, ``OpenSSL.crypto.load_certificate_request``, ``OpenSSL.crypto.dump_certificate_request``. Instead, ``cryptography.x509.CertificateSigningRequest``, ``cryptography.x509.CertificateSigningRequestBuilder``, ``cryptography.x509.load_der_x509_csr``, or ``cryptography.x509.load_pem_x509_csr`` should be used. + +Changes: +^^^^^^^^ + +- Added type hints for the ``SSL`` module. + `#1308 `_. +- Changed ``OpenSSL.crypto.PKey.from_cryptography_key`` to accept public and private EC, ED25519, ED448 keys. + `#1310 `_. + +24.1.0 (2024-03-09) +------------------- + +Backward-incompatible changes: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* Removed the deprecated ``OpenSSL.crypto.PKCS12`` and + ``OpenSSL.crypto.NetscapeSPKI``. ``OpenSSL.crypto.PKCS12`` may be replaced + by the PKCS#12 APIs in the ``cryptography`` package. + +Deprecations: +^^^^^^^^^^^^^ + +Changes: +^^^^^^^^ + +24.0.0 (2024-01-22) +------------------- + +Backward-incompatible changes: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Deprecations: +^^^^^^^^^^^^^ + +Changes: +^^^^^^^^ + +- Added ``OpenSSL.SSL.Connection.get_selected_srtp_profile`` to determine which SRTP profile was negotiated. + `#1279 `_. + +23.3.0 (2023-10-25) +------------------- + +Backward-incompatible changes: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- Dropped support for Python 3.6. +- The minimum ``cryptography`` version is now 41.0.5. +- Removed ``OpenSSL.crypto.load_pkcs7`` and ``OpenSSL.crypto.load_pkcs12`` which had been deprecated for 3 years. +- Added ``OpenSSL.SSL.OP_LEGACY_SERVER_CONNECT`` to allow legacy insecure renegotiation between OpenSSL and unpatched servers. + `#1234 `_. + +Deprecations: +^^^^^^^^^^^^^ + +- Deprecated ``OpenSSL.crypto.PKCS12`` (which was intended to have been deprecated at the same time as ``OpenSSL.crypto.load_pkcs12``). +- Deprecated ``OpenSSL.crypto.NetscapeSPKI``. +- Deprecated ``OpenSSL.crypto.CRL`` +- Deprecated ``OpenSSL.crypto.Revoked`` +- Deprecated ``OpenSSL.crypto.load_crl`` and ``OpenSSL.crypto.dump_crl`` +- Deprecated ``OpenSSL.crypto.sign`` and ``OpenSSL.crypto.verify`` +- Deprecated ``OpenSSL.crypto.X509Extension`` + +Changes: +^^^^^^^^ + +- Changed ``OpenSSL.crypto.X509Store.add_crl`` to also accept + ``cryptography``'s ``x509.CertificateRevocationList`` arguments in addition + to the now deprecated ``OpenSSL.crypto.CRL`` arguments.
+- Fixed ``test_set_default_verify_paths`` test so that it is skipped if no + network connection is available. + +23.2.0 (2023-05-30) +------------------- + +Backward-incompatible changes: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- Removed ``X509StoreFlags.NOTIFY_POLICY``. + `#1213 `_. + +Deprecations: +^^^^^^^^^^^^^ + +Changes: +^^^^^^^^ + +- ``cryptography`` maximum version has been increased to 41.0.x. +- Invalid versions are now rejected in ``OpenSSL.crypto.X509Req.set_version``. +- Added ``X509VerificationCodes`` to ``OpenSSL.SSL``. + `#1202 `_. + +23.1.1 (2023-03-28) +------------------- + +Backward-incompatible changes: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Deprecations: +^^^^^^^^^^^^^ + +Changes: +^^^^^^^^ + +- Worked around an issue in OpenSSL 3.1.0 which caused `X509Extension.get_short_name` to raise an exception when no short name was known to OpenSSL. + `#1204 `_. + +23.1.0 (2023-03-24) +------------------- + +Backward-incompatible changes: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Deprecations: +^^^^^^^^^^^^^ + +Changes: +^^^^^^^^ + +- ``cryptography`` maximum version has been increased to 40.0.x. +- Add ``OpenSSL.SSL.Connection.DTLSv1_get_timeout`` and ``OpenSSL.SSL.Connection.DTLSv1_handle_timeout`` + to support DTLS timeouts `#1180 `_. + +23.0.0 (2023-01-01) +------------------- + +Backward-incompatible changes: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Deprecations: +^^^^^^^^^^^^^ + +Changes: +^^^^^^^^ + +- Add ``OpenSSL.SSL.X509StoreFlags.PARTIAL_CHAIN`` constant to allow for users + to perform certificate verification on partial certificate chains. + `#1166 `_ +- ``cryptography`` maximum version has been increased to 39.0.x. + +22.1.0 (2022-09-25) +------------------- + +Backward-incompatible changes: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- Remove support for SSLv2 and SSLv3. +- The minimum ``cryptography`` version is now 38.0.x (and we now pin releases + against ``cryptography`` major versions to prevent future breakage) +- The ``OpenSSL.crypto.X509StoreContextError`` exception has been refactored, + changing its internal attributes. + `#1133 `_ + +Deprecations: +^^^^^^^^^^^^^ + +- ``OpenSSL.SSL.SSLeay_version`` is deprecated in favor of + ``OpenSSL.SSL.OpenSSL_version``. The constants ``OpenSSL.SSL.SSLEAY_*`` are + deprecated in favor of ``OpenSSL.SSL.OPENSSL_*``. + +Changes: +^^^^^^^^ + +- Add ``OpenSSL.SSL.Connection.set_verify`` and ``OpenSSL.SSL.Connection.get_verify_mode`` + to override the context object's verification flags. + `#1073 `_ +- Add ``OpenSSL.SSL.Connection.use_certificate`` and ``OpenSSL.SSL.Connection.use_privatekey`` + to set a certificate per connection (and not just per context) `#1121 `_. + +22.0.0 (2022-01-29) +------------------- + +Backward-incompatible changes: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- Drop support for Python 2.7. + `#1047 `_ +- The minimum ``cryptography`` version is now 35.0. + +Deprecations: +^^^^^^^^^^^^^ + +Changes: +^^^^^^^^ + +- Expose wrappers for some `DTLS + `_ + primitives. `#1026 `_ + +21.0.0 (2021-09-28) +------------------- + +Backward-incompatible changes: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- The minimum ``cryptography`` version is now 3.3. +- Drop support for Python 3.5 + +Deprecations: +^^^^^^^^^^^^^ + +Changes: +^^^^^^^^ + +- Raise an error when an invalid ALPN value is set. + `#993 `_ +- Added ``OpenSSL.SSL.Context.set_min_proto_version`` and ``OpenSSL.SSL.Context.set_max_proto_version`` + to set the minimum and maximum supported TLS version `#985 `_. 
+- Updated ``to_cryptography`` and ``from_cryptography`` methods to support an upcoming release of ``cryptography`` without raising deprecation warnings. + `#1030 `_ + +20.0.1 (2020-12-15) +------------------- + +Backward-incompatible changes: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Deprecations: +^^^^^^^^^^^^^ + +Changes: +^^^^^^^^ + +- Fixed compatibility with OpenSSL 1.1.0. + +20.0.0 (2020-11-27) +------------------- + + +Backward-incompatible changes: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- The minimum ``cryptography`` version is now 3.2. +- Remove deprecated ``OpenSSL.tsafe`` module. +- Removed deprecated ``OpenSSL.SSL.Context.set_npn_advertise_callback``, ``OpenSSL.SSL.Context.set_npn_select_callback``, and ``OpenSSL.SSL.Connection.get_next_proto_negotiated``. +- Drop support for Python 3.4 +- Drop support for OpenSSL 1.0.1 and 1.0.2 + +Deprecations: +^^^^^^^^^^^^^ + +- Deprecated ``OpenSSL.crypto.load_pkcs7`` and ``OpenSSL.crypto.load_pkcs12``. + +Changes: +^^^^^^^^ + +- Added a new optional ``chain`` parameter to ``OpenSSL.crypto.X509StoreContext()`` + where additional untrusted certificates can be specified to help chain building. + `#948 `_ +- Added ``OpenSSL.crypto.X509Store.load_locations`` to set trusted + certificate file bundles and/or directories for verification. + `#943 `_ +- Added ``Context.set_keylog_callback`` to log key material. + `#910 `_ +- Added ``OpenSSL.SSL.Connection.get_verified_chain`` to retrieve the + verified certificate chain of the peer. + `#894 `_. +- Make verification callback optional in ``Context.set_verify``. + If omitted, OpenSSL's default verification is used. + `#933 `_ +- Fixed a bug that could truncate or cause a zero-length key error due to a + null byte in private key passphrase in ``OpenSSL.crypto.load_privatekey`` + and ``OpenSSL.crypto.dump_privatekey``. + `#947 `_ + +19.1.0 (2019-11-18) +------------------- + + +Backward-incompatible changes: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- Removed deprecated ``ContextType``, ``ConnectionType``, ``PKeyType``, ``X509NameType``, ``X509ReqType``, ``X509Type``, ``X509StoreType``, ``CRLType``, ``PKCS7Type``, ``PKCS12Type``, and ``NetscapeSPKIType`` aliases. + Use the classes without the ``Type`` suffix instead. + `#814 `_ +- The minimum ``cryptography`` version is now 2.8 due to issues on macOS with a transitive dependency. + `#875 `_ + +Deprecations: +^^^^^^^^^^^^^ + +- Deprecated ``OpenSSL.SSL.Context.set_npn_advertise_callback``, ``OpenSSL.SSL.Context.set_npn_select_callback``, and ``OpenSSL.SSL.Connection.get_next_proto_negotiated``. + ALPN should be used instead. + `#820 `_ + + +Changes: +^^^^^^^^ + +- Support ``bytearray`` in ``SSL.Connection.send()`` by using cffi's from_buffer. + `#852 `_ +- The ``OpenSSL.SSL.Context.set_alpn_select_callback`` can return a new ``NO_OVERLAPPING_PROTOCOLS`` sentinel value + to allow a TLS handshake to complete without an application protocol. + +`Full changelog `_. 
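Read together, the changelog entries above describe a complete modern TLS client workflow. The sketch below (an editorial illustration, not part of the packaged metadata) ties them together: the host name is a placeholder, error handling is omitted, and it relies only on APIs named in this changelog — ``set_min_proto_version`` (21.0.0), the optional verify callback in ``set_verify`` (20.0.0), and the ``as_cryptography`` keyword (24.3.0).

.. code:: python

    import socket

    from OpenSSL import SSL

    ctx = SSL.Context(SSL.TLS_CLIENT_METHOD)
    ctx.set_min_proto_version(SSL.TLS1_2_VERSION)  # added in 21.0.0
    ctx.set_default_verify_paths()
    ctx.set_verify(SSL.VERIFY_PEER)  # verify callback optional since 20.0.0

    sock = socket.create_connection(("example.org", 443))  # placeholder host
    conn = SSL.Connection(ctx, sock)
    conn.set_tlsext_host_name(b"example.org")  # SNI
    conn.set_connect_state()
    conn.do_handshake()

    # as_cryptography (24.3.0) returns a cryptography.x509.Certificate.
    cert = conn.get_peer_certificate(as_cryptography=True)
    print(cert.subject)

    conn.shutdown()
    sock.close()

Note that ``VERIFY_PEER`` validates the certificate chain only; pyOpenSSL does not check the host name against the certificate for you, which is one more reason to prefer ``pyca/cryptography`` (or the standard library's ``ssl`` module) where possible, as the note at the top of this README suggests.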
+ diff --git a/venv/lib/python3.12/site-packages/pyopenssl-25.3.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/pyopenssl-25.3.0.dist-info/RECORD new file mode 100644 index 00000000..12af97d3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pyopenssl-25.3.0.dist-info/RECORD @@ -0,0 +1,22 @@ +OpenSSL/SSL.py,sha256=KzyeY0OEOTCiASBbgYC5AEEQ0qFNDv8JqEqfDz2CmW4,115717 +OpenSSL/__init__.py,sha256=46ENrQmALeGbnIoOeFaa3xEiaGcmoYPa16Dfdp6hMio,497 +OpenSSL/__pycache__/SSL.cpython-312.pyc,, +OpenSSL/__pycache__/__init__.cpython-312.pyc,, +OpenSSL/__pycache__/_util.cpython-312.pyc,, +OpenSSL/__pycache__/crypto.cpython-312.pyc,, +OpenSSL/__pycache__/debug.cpython-312.pyc,, +OpenSSL/__pycache__/rand.cpython-312.pyc,, +OpenSSL/__pycache__/version.cpython-312.pyc,, +OpenSSL/_util.py,sha256=FUIjL9tiCFmyQBBvICPcGd6vqjCmvx-OFKhHEVM-pZE,3692 +OpenSSL/crypto.py,sha256=KxbXAitSJDMQpZ0dsD25osnvgwqrWQ3m1YMK5uLzEhU,79657 +OpenSSL/debug.py,sha256=vCl77f2MslUoTRSu9fqP5DL_9DK_RSC7Jpu07Ip9gQM,1008 +OpenSSL/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +OpenSSL/rand.py,sha256=tSxdA95ITOS24rLCI-tE_hm4V8-i68d6nDGFt4vROQM,1252 +OpenSSL/version.py,sha256=sBsBulyckMCs-65_YlxSzhnC56644kYqbdyQvFIUHps,641 +pyopenssl-25.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pyopenssl-25.3.0.dist-info/METADATA,sha256=GvHLx6jxAeyBfkStQxKGIc5ZVEyOXEFYUn2n2hDzmBY,17950 +pyopenssl-25.3.0.dist-info/RECORD,, +pyopenssl-25.3.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pyopenssl-25.3.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 +pyopenssl-25.3.0.dist-info/licenses/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358 +pyopenssl-25.3.0.dist-info/top_level.txt,sha256=NNxWqS8hKNJh2cUXa1RZOMX62VJfyd8URo1TsYnR_MU,8 diff --git a/venv/lib/python3.12/site-packages/pyopenssl-25.3.0.dist-info/REQUESTED b/venv/lib/python3.12/site-packages/pyopenssl-25.3.0.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/venv/lib/python3.12/site-packages/pyopenssl-25.3.0.dist-info/WHEEL b/venv/lib/python3.12/site-packages/pyopenssl-25.3.0.dist-info/WHEEL new file mode 100644 index 00000000..e7fa31b6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pyopenssl-25.3.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.12/site-packages/pyopenssl-25.3.0.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/pyopenssl-25.3.0.dist-info/licenses/LICENSE new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pyopenssl-25.3.0.dist-info/licenses/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/venv/lib/python3.12/site-packages/pyopenssl-25.3.0.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/pyopenssl-25.3.0.dist-info/top_level.txt new file mode 100644 index 00000000..effce34b --- /dev/null +++ b/venv/lib/python3.12/site-packages/pyopenssl-25.3.0.dist-info/top_level.txt @@ -0,0 +1 @@ +OpenSSL diff --git a/venv/lib/python3.12/site-packages/pyyaml-6.0.3.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/pyyaml-6.0.3.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pyyaml-6.0.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/pyyaml-6.0.3.dist-info/METADATA b/venv/lib/python3.12/site-packages/pyyaml-6.0.3.dist-info/METADATA new file mode 100644 index 00000000..702ee6da --- /dev/null +++ b/venv/lib/python3.12/site-packages/pyyaml-6.0.3.dist-info/METADATA @@ -0,0 +1,59 @@ +Metadata-Version: 2.4 +Name: PyYAML +Version: 6.0.3 +Summary: YAML parser and emitter for Python +Home-page: https://pyyaml.org/ +Download-URL: https://pypi.org/project/PyYAML/ +Author: Kirill Simonov +Author-email: xi@resolvent.net +License: MIT +Project-URL: Bug Tracker, https://github.com/yaml/pyyaml/issues +Project-URL: CI, https://github.com/yaml/pyyaml/actions +Project-URL: Documentation, https://pyyaml.org/wiki/PyYAMLDocumentation +Project-URL: Mailing lists, http://lists.sourceforge.net/lists/listinfo/yaml-core +Project-URL: Source Code, https://github.com/yaml/pyyaml +Platform: Any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Cython +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Markup +Requires-Python: >=3.8 +License-File: LICENSE +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: download-url +Dynamic: home-page +Dynamic: license +Dynamic: license-file +Dynamic: platform +Dynamic: project-url +Dynamic: requires-python +Dynamic: summary + +YAML is a data serialization format designed for human readability +and interaction with scripting languages. PyYAML is a YAML parser +and emitter for Python. + +PyYAML features a complete YAML 1.1 parser, Unicode support, pickle +support, capable extension API, and sensible error messages. PyYAML +supports standard YAML tags and provides Python-specific tags that +allow representing an arbitrary Python object. + +PyYAML is applicable for a broad range of tasks from complex +configuration files to object serialization and persistence.
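To make the description above concrete, here is a minimal, illustrative round-trip through the safe API (the document and its keys are invented for the example):

.. code:: python

    import yaml

    doc = """
    server:
      host: example.org
      ports: [443, 8443]
    """

    # safe_load resolves only standard YAML tags into plain Python types.
    cfg = yaml.safe_load(doc)
    assert cfg["server"]["ports"] == [443, 8443]

    # safe_dump emits standard tags only; the Python-specific tags mentioned
    # above need the full loader/dumper and should be reserved for trusted input.
    print(yaml.safe_dump(cfg, sort_keys=False))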
diff --git a/venv/lib/python3.12/site-packages/pyyaml-6.0.3.dist-info/RECORD b/venv/lib/python3.12/site-packages/pyyaml-6.0.3.dist-info/RECORD new file mode 100644 index 00000000..5d439d0f --- /dev/null +++ b/venv/lib/python3.12/site-packages/pyyaml-6.0.3.dist-info/RECORD @@ -0,0 +1,44 @@ +_yaml/__init__.py,sha256=04Ae_5osxahpJHa3XBZUAf4wi6XX32gR8D6X6p64GEA,1402 +_yaml/__pycache__/__init__.cpython-312.pyc,, +pyyaml-6.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pyyaml-6.0.3.dist-info/METADATA,sha256=A8O0Fe040J-u3Ek2DpMHabQMWPaFhebeAOLkkWqFjTQ,2351 +pyyaml-6.0.3.dist-info/RECORD,, +pyyaml-6.0.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pyyaml-6.0.3.dist-info/WHEEL,sha256=DxRnWQz-Kp9-4a4hdDHsSv0KUC3H7sN9Nbef3-8RjXU,190 +pyyaml-6.0.3.dist-info/licenses/LICENSE,sha256=jTko-dxEkP1jVwfLiOsmvXZBAqcoKVQwfT5RZ6V36KQ,1101 +pyyaml-6.0.3.dist-info/top_level.txt,sha256=rpj0IVMTisAjh_1vG3Ccf9v5jpCQwAz6cD1IVU5ZdhQ,11 +yaml/__init__.py,sha256=sZ38wzPWp139cwc5ARZFByUvJxtB07X32FUQAzoFR6c,12311 +yaml/__pycache__/__init__.cpython-312.pyc,, +yaml/__pycache__/composer.cpython-312.pyc,, +yaml/__pycache__/constructor.cpython-312.pyc,, +yaml/__pycache__/cyaml.cpython-312.pyc,, +yaml/__pycache__/dumper.cpython-312.pyc,, +yaml/__pycache__/emitter.cpython-312.pyc,, +yaml/__pycache__/error.cpython-312.pyc,, +yaml/__pycache__/events.cpython-312.pyc,, +yaml/__pycache__/loader.cpython-312.pyc,, +yaml/__pycache__/nodes.cpython-312.pyc,, +yaml/__pycache__/parser.cpython-312.pyc,, +yaml/__pycache__/reader.cpython-312.pyc,, +yaml/__pycache__/representer.cpython-312.pyc,, +yaml/__pycache__/resolver.cpython-312.pyc,, +yaml/__pycache__/scanner.cpython-312.pyc,, +yaml/__pycache__/serializer.cpython-312.pyc,, +yaml/__pycache__/tokens.cpython-312.pyc,, +yaml/_yaml.cpython-312-x86_64-linux-gnu.so,sha256=lXoJmkUhwfdmkwb7b3nKY_pLmgtMRj98rIM6ZaXFwMs,2679264 +yaml/composer.py,sha256=_Ko30Wr6eDWUeUpauUGT3Lcg9QPBnOPVlTnIMRGJ9FM,4883 +yaml/constructor.py,sha256=kNgkfaeLUkwQYY_Q6Ff1Tz2XVw_pG1xVE9Ak7z-viLA,28639 +yaml/cyaml.py,sha256=6ZrAG9fAYvdVe2FK_w0hmXoG7ZYsoYUwapG8CiC72H0,3851 +yaml/dumper.py,sha256=PLctZlYwZLp7XmeUdwRuv4nYOZ2UBnDIUy8-lKfLF-o,2837 +yaml/emitter.py,sha256=jghtaU7eFwg31bG0B7RZea_29Adi9CKmXq_QjgQpCkQ,43006 +yaml/error.py,sha256=Ah9z-toHJUbE9j-M8YpxgSRM5CgLCcwVzJgLLRF2Fxo,2533 +yaml/events.py,sha256=50_TksgQiE4up-lKo_V-nBy-tAIxkIPQxY5qDhKCeHw,2445 +yaml/loader.py,sha256=UVa-zIqmkFSCIYq_PgSGm4NSJttHY2Rf_zQ4_b1fHN0,2061 +yaml/nodes.py,sha256=gPKNj8pKCdh2d4gr3gIYINnPOaOxGhJAUiYhGRnPE84,1440 +yaml/parser.py,sha256=ilWp5vvgoHFGzvOZDItFoGjD6D42nhlZrZyjAwa0oJo,25495 +yaml/reader.py,sha256=0dmzirOiDG4Xo41RnuQS7K9rkY3xjHiVasfDMNTqCNw,6794 +yaml/representer.py,sha256=IuWP-cAW9sHKEnS0gCqSa894k1Bg4cgTxaDwIcbRQ-Y,14190 +yaml/resolver.py,sha256=9L-VYfm4mWHxUD1Vg4X7rjDRK_7VZd6b92wzq7Y2IKY,9004 +yaml/scanner.py,sha256=YEM3iLZSaQwXcQRg2l2R4MdT0zGP2F9eHkKGKnHyWQY,51279 +yaml/serializer.py,sha256=ChuFgmhU01hj4xgI8GaKv6vfM2Bujwa9i7d2FAHj7cA,4165 +yaml/tokens.py,sha256=lTQIzSVw8Mg9wv459-TjiOQe6wVziqaRlqX2_89rp54,2573 diff --git a/venv/lib/python3.12/site-packages/pyyaml-6.0.3.dist-info/REQUESTED b/venv/lib/python3.12/site-packages/pyyaml-6.0.3.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/venv/lib/python3.12/site-packages/pyyaml-6.0.3.dist-info/WHEEL b/venv/lib/python3.12/site-packages/pyyaml-6.0.3.dist-info/WHEEL new file mode 100644 index 00000000..f3e8a970 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pyyaml-6.0.3.dist-info/WHEEL @@ 
-0,0 +1,7 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: false +Tag: cp312-cp312-manylinux_2_17_x86_64 +Tag: cp312-cp312-manylinux2014_x86_64 +Tag: cp312-cp312-manylinux_2_28_x86_64 + diff --git a/venv/lib/python3.12/site-packages/pyyaml-6.0.3.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/pyyaml-6.0.3.dist-info/licenses/LICENSE new file mode 100644 index 00000000..2f1b8e15 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pyyaml-6.0.3.dist-info/licenses/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2017-2021 Ingy döt Net +Copyright (c) 2006-2016 Kirill Simonov + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/venv/lib/python3.12/site-packages/pyyaml-6.0.3.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/pyyaml-6.0.3.dist-info/top_level.txt new file mode 100644 index 00000000..e6475e91 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pyyaml-6.0.3.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_yaml +yaml diff --git a/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/LICENCE b/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/LICENCE new file mode 100644 index 00000000..a8922b18 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/LICENCE @@ -0,0 +1,49 @@ +`tqdm` is a product of collaborative work. +Unless otherwise stated, all authors (see commit logs) retain copyright +for their respective work, and release the work under the MIT licence +(text below). + +Exceptions or notable authors are listed below +in reverse chronological order: + +* files: * + MPL-2.0 2015-2024 (c) Casper da Costa-Luis + [casperdcl](https://github.com/casperdcl). +* files: tqdm/_tqdm.py + MIT 2016 (c) [PR #96] on behalf of Google Inc. +* files: tqdm/_tqdm.py README.rst .gitignore + MIT 2013 (c) Noam Yorav-Raphael, original author. + +[PR #96]: https://github.com/tqdm/tqdm/pull/96 + + +Mozilla Public Licence (MPL) v. 2.0 - Exhibit A +----------------------------------------------- + +This Source Code Form is subject to the terms of the +Mozilla Public License, v. 2.0. +If a copy of the MPL was not distributed with this project, +You can obtain one at https://mozilla.org/MPL/2.0/. 
+ + +MIT License (MIT) +----------------- + +Copyright (c) 2013 noamraph + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/METADATA b/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/METADATA new file mode 100644 index 00000000..181b4dc8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/METADATA @@ -0,0 +1,1594 @@ +Metadata-Version: 2.1 +Name: tqdm +Version: 4.67.1 +Summary: Fast, Extensible Progress Meter +Maintainer-email: tqdm developers +License: MPL-2.0 AND MIT +Project-URL: homepage, https://tqdm.github.io +Project-URL: repository, https://github.com/tqdm/tqdm +Project-URL: changelog, https://tqdm.github.io/releases +Project-URL: wiki, https://github.com/tqdm/tqdm/wiki +Keywords: progressbar,progressmeter,progress,bar,meter,rate,eta,console,terminal,time +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Environment :: MacOS X +Classifier: Environment :: Other Environment +Classifier: Environment :: Win32 (MS Windows) +Classifier: Environment :: X11 Applications +Classifier: Framework :: IPython +Classifier: Framework :: Jupyter +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: End Users/Desktop +Classifier: Intended Audience :: Other Audience +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: MIT License +Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) +Classifier: Operating System :: MacOS +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft +Classifier: Operating System :: Microsoft :: MS-DOS +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX +Classifier: Operating System :: POSIX :: BSD +Classifier: Operating System :: POSIX :: BSD :: FreeBSD +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: POSIX :: SunOS/Solaris +Classifier: Operating System :: Unix +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3 :: 
Only +Classifier: Programming Language :: Python :: Implementation +Classifier: Programming Language :: Python :: Implementation :: IronPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Unix Shell +Classifier: Topic :: Desktop Environment +Classifier: Topic :: Education :: Computer Aided Instruction (CAI) +Classifier: Topic :: Education :: Testing +Classifier: Topic :: Office/Business +Classifier: Topic :: Other/Nonlisted Topic +Classifier: Topic :: Software Development :: Build Tools +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Software Development :: Pre-processors +Classifier: Topic :: Software Development :: User Interfaces +Classifier: Topic :: System :: Installation/Setup +Classifier: Topic :: System :: Logging +Classifier: Topic :: System :: Monitoring +Classifier: Topic :: System :: Shells +Classifier: Topic :: Terminals +Classifier: Topic :: Utilities +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENCE +Requires-Dist: colorama; platform_system == "Windows" +Provides-Extra: dev +Requires-Dist: pytest>=6; extra == "dev" +Requires-Dist: pytest-cov; extra == "dev" +Requires-Dist: pytest-timeout; extra == "dev" +Requires-Dist: pytest-asyncio>=0.24; extra == "dev" +Requires-Dist: nbval; extra == "dev" +Provides-Extra: discord +Requires-Dist: requests; extra == "discord" +Provides-Extra: slack +Requires-Dist: slack-sdk; extra == "slack" +Provides-Extra: telegram +Requires-Dist: requests; extra == "telegram" +Provides-Extra: notebook +Requires-Dist: ipywidgets>=6; extra == "notebook" + +|Logo| + +tqdm +==== + +|Py-Versions| |Versions| |Conda-Forge-Status| |Docker| |Snapcraft| + +|Build-Status| |Coverage-Status| |Branch-Coverage-Status| |Codacy-Grade| |Libraries-Rank| |PyPI-Downloads| + +|LICENCE| |OpenHub-Status| |binder-demo| |awesome-python| + +``tqdm`` derives from the Arabic word *taqaddum* (تقدّم) which can mean "progress," +and is an abbreviation for "I love you so much" in Spanish (*te quiero demasiado*). + +Instantly make your loops show a smart progress meter - just wrap any +iterable with ``tqdm(iterable)``, and you're done! + +.. code:: python + + from tqdm import tqdm + for i in tqdm(range(10000)): + ... + +``76%|████████████████████████        | 7568/10000 [00:33<00:10, 229.00it/s]`` + +``trange(N)`` can be also used as a convenient shortcut for +``tqdm(range(N))``. + +|Screenshot| + |Video| |Slides| |Merch| + +It can also be executed as a module with pipes: + +.. code:: sh + + $ seq 9999999 | tqdm --bytes | wc -l + 75.2MB [00:00, 217MB/s] + 9999999 + + $ tar -zcf - docs/ | tqdm --bytes --total `du -sb docs/ | cut -f1` \ + > backup.tgz + 32%|██████████▍ | 8.89G/27.9G [00:42<01:31, 223MB/s] + +Overhead is low -- about 60ns per iteration (80ns with ``tqdm.gui``), and is +unit tested against performance regression. +By comparison, the well-established +`ProgressBar `__ has +an 800ns/iter overhead. + +In addition to its low overhead, ``tqdm`` uses smart algorithms to predict +the remaining time and to skip unnecessary iteration displays, which allows +for a negligible overhead in most cases. + +``tqdm`` works on any platform +(Linux, Windows, Mac, FreeBSD, NetBSD, Solaris/SunOS), +in any console or in a GUI, and is also friendly with IPython/Jupyter notebooks. 
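The iteration-display skipping described above can also be tuned by hand. A small, illustrative sketch using the ``miniters`` and ``mininterval`` parameters documented further down (the values shown are arbitrary):

.. code:: python

    from tqdm import tqdm

    total = 10_000_000
    # Redraw at most about twice per second, and never more often than once
    # per 1% of the work, keeping per-iteration overhead negligible.
    for _ in tqdm(range(total), miniters=total // 100, mininterval=0.5):
        pass  # hot loop body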
+ +``tqdm`` does not require any dependencies (not even ``curses``!), just +Python and an environment supporting ``carriage return \r`` and +``line feed \n`` control characters. + +------------------------------------------ + +.. contents:: Table of contents + :backlinks: top + :local: + + +Installation +------------ + +Latest PyPI stable release +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +|Versions| |PyPI-Downloads| |Libraries-Dependents| + +.. code:: sh + + pip install tqdm + +Latest development release on GitHub +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +|GitHub-Status| |GitHub-Stars| |GitHub-Commits| |GitHub-Forks| |GitHub-Updated| + +Pull and install pre-release ``devel`` branch: + +.. code:: sh + + pip install "git+https://github.com/tqdm/tqdm.git@devel#egg=tqdm" + +Latest Conda release +~~~~~~~~~~~~~~~~~~~~ + +|Conda-Forge-Status| + +.. code:: sh + + conda install -c conda-forge tqdm + +Latest Snapcraft release +~~~~~~~~~~~~~~~~~~~~~~~~ + +|Snapcraft| + +There are 3 channels to choose from: + +.. code:: sh + + snap install tqdm # implies --stable, i.e. latest tagged release + snap install tqdm --candidate # master branch + snap install tqdm --edge # devel branch + +Note that ``snap`` binaries are purely for CLI use (not ``import``-able), and +automatically set up ``bash`` tab-completion. + +Latest Docker release +~~~~~~~~~~~~~~~~~~~~~ + +|Docker| + +.. code:: sh + + docker pull tqdm/tqdm + docker run -i --rm tqdm/tqdm --help + +Other +~~~~~ + +There are other (unofficial) places where ``tqdm`` may be downloaded, particularly for CLI use: + +|Repology| + +.. |Repology| image:: https://repology.org/badge/tiny-repos/python:tqdm.svg + :target: https://repology.org/project/python:tqdm/versions + +Changelog +--------- + +The list of all changes is available either on GitHub's Releases: +|GitHub-Status|, on the +`wiki `__, or on the +`website `__. + + +Usage +----- + +``tqdm`` is very versatile and can be used in a number of ways. +The three main ones are given below. + +Iterable-based +~~~~~~~~~~~~~~ + +Wrap ``tqdm()`` around any iterable: + +.. code:: python + + from tqdm import tqdm + from time import sleep + + text = "" + for char in tqdm(["a", "b", "c", "d"]): + sleep(0.25) + text = text + char + +``trange(i)`` is a special optimised instance of ``tqdm(range(i))``: + +.. code:: python + + from tqdm import trange + + for i in trange(100): + sleep(0.01) + +Instantiation outside of the loop allows for manual control over ``tqdm()``: + +.. code:: python + + pbar = tqdm(["a", "b", "c", "d"]) + for char in pbar: + sleep(0.25) + pbar.set_description("Processing %s" % char) + +Manual +~~~~~~ + +Manual control of ``tqdm()`` updates using a ``with`` statement: + +.. code:: python + + with tqdm(total=100) as pbar: + for i in range(10): + sleep(0.1) + pbar.update(10) + +If the optional variable ``total`` (or an iterable with ``len()``) is +provided, predictive stats are displayed. + +``with`` is also optional (you can just assign ``tqdm()`` to a variable, +but in this case don't forget to ``del`` or ``close()`` at the end: + +.. code:: python + + pbar = tqdm(total=100) + for i in range(10): + sleep(0.1) + pbar.update(10) + pbar.close() + +Module +~~~~~~ + +Perhaps the most wonderful use of ``tqdm`` is in a script or on the command +line. Simply inserting ``tqdm`` (or ``python -m tqdm``) between pipes will pass +through all ``stdin`` to ``stdout`` while printing progress to ``stderr``. 
+
+The example below demonstrates counting the number of lines in all Python files
+in the current directory, with timing information included.
+
+.. code:: sh
+
+    $ time find . -name '*.py' -type f -exec cat \{} \; | wc -l
+    857365
+
+    real    0m3.458s
+    user    0m0.274s
+    sys     0m3.325s
+
+    $ time find . -name '*.py' -type f -exec cat \{} \; | tqdm | wc -l
+    857366it [00:03, 246471.31it/s]
+    857365
+
+    real    0m3.585s
+    user    0m0.862s
+    sys     0m3.358s
+
+Note that the usual arguments for ``tqdm`` can also be specified.
+
+.. code:: sh
+
+    $ find . -name '*.py' -type f -exec cat \{} \; |
+        tqdm --unit loc --unit_scale --total 857366 >> /dev/null
+    100%|█████████████████████████████████| 857K/857K [00:04<00:00, 246Kloc/s]
+
+Backing up a large directory?
+
+.. code:: sh
+
+    $ tar -zcf - docs/ | tqdm --bytes --total `du -sb docs/ | cut -f1` \
+        > backup.tgz
+    44%|██████████████▊ | 153M/352M [00:14<00:18, 11.0MB/s]
+
+This can be beautified further:
+
+.. code:: sh
+
+    $ BYTES=$(du -sb docs/ | cut -f1)
+    $ tar -cf - docs/ \
+      | tqdm --bytes --total "$BYTES" --desc Processing | gzip \
+      | tqdm --bytes --total "$BYTES" --desc Compressed --position 1 \
+      > ~/backup.tgz
+    Processing: 100%|██████████████████████| 352M/352M [00:14<00:00, 30.2MB/s]
+    Compressed:  42%|█████████▎ | 148M/352M [00:14<00:19, 10.9MB/s]
+
+Or done on a file level using 7-zip:
+
+.. code:: sh
+
+    $ 7z a -bd -r backup.7z docs/ | grep Compressing \
+      | tqdm --total $(find docs/ -type f | wc -l) --unit files \
+      | grep -v Compressing
+    100%|██████████████████████████▉| 15327/15327 [01:00<00:00, 712.96files/s]
+
+Pre-existing CLI programs already outputting basic progress information will
+benefit from ``tqdm``'s ``--update`` and ``--update_to`` flags:
+
+.. code:: sh
+
+    $ seq 3 0.1 5 | tqdm --total 5 --update_to --null
+    100%|████████████████████████████████████| 5.0/5 [00:00<00:00, 9673.21it/s]
+    $ seq 10 | tqdm --update --null  # 1 + 2 + ... + 10 = 55 iterations
+    55it [00:00, 90006.52it/s]
+
+FAQ and Known Issues
+--------------------
+
+|GitHub-Issues|
+
+The most common issues relate to excessive output on multiple lines, instead
+of a neat one-line progress bar.
+
+- Consoles in general: require support for carriage return (``CR``, ``\r``).
+
+  * Some cloud logging consoles which don't support ``\r`` properly
+    (`cloudwatch `__,
+    `K8s `__) may benefit from
+    ``export TQDM_POSITION=-1``.
+
+- Nested progress bars:
+
+  * Consoles in general: require support for moving cursors up to the
+    previous line. For example,
+    `IDLE `__,
+    `ConEmu `__ and
+    `PyCharm `__ (also
+    `here `__,
+    `here `__, and
+    `here `__)
+    lack full support.
+  * Windows: additionally may require the Python module ``colorama``
+    to ensure nested bars stay within their respective lines.
+
+- Unicode:
+
+  * Environments which report that they support unicode will have solid smooth
+    progressbars. The fallback is an ``ascii``-only bar.
+  * Windows consoles often only partially support unicode and thus
+    `often require explicit ascii=True `__
+    (also `here `__). This is due to
+    either normal-width unicode characters being incorrectly displayed as
+    "wide", or some unicode characters not rendering.
+
+- Wrapping generators:
+
+  * Generator wrapper functions tend to hide the length of iterables.
+    ``tqdm`` does not.
+  * Replace ``tqdm(enumerate(...))`` with ``enumerate(tqdm(...))`` or
+    ``tqdm(enumerate(x), total=len(x), ...)``.
+    The same applies to ``numpy.ndenumerate``.
+  * Replace ``tqdm(zip(a, b))`` with ``zip(tqdm(a), b)`` or even
+    ``zip(tqdm(a), tqdm(b))`` (see the sketch after this list).
+  * The same applies to ``itertools``.
+  * Some useful convenience functions can be found under ``tqdm.contrib``.
+
+- `No intermediate output in docker-compose `__:
+  use ``docker-compose run`` instead of ``docker-compose up`` and ``tty: true``.
+
+- Overriding defaults via environment variables:
+  e.g. in CI/cloud jobs, ``export TQDM_MININTERVAL=5`` to avoid log spam.
+  This override logic is handled by the ``tqdm.utils.envwrap`` decorator
+  (useful independently of ``tqdm``).
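+
+A minimal sketch of the generator-wrapping guidance above (``data``, ``a`` and
+``b`` are placeholder iterables):
+
+.. code:: python
+
+    from tqdm import tqdm
+
+    data = ["x", "y", "z"]
+    a, b = [1, 2, 3], [4, 5, 6]
+
+    # wrap the sized iterable, not the length-hiding generator
+    for i, item in enumerate(tqdm(data)):  # rather than tqdm(enumerate(data))
+        pass
+
+    for left, right in zip(tqdm(a), b):    # rather than tqdm(zip(a, b))
+        pass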
+
+If you come across any other difficulties, browse and file |GitHub-Issues|.
+
+Documentation
+-------------
+
+|Py-Versions| |README-Hits| (Since 19 May 2016)
+
+.. code:: python
+
+    class tqdm():
+        """
+        Decorate an iterable object, returning an iterator which acts exactly
+        like the original iterable, but prints a dynamically updating
+        progressbar every time a value is requested.
+        """
+
+        @envwrap("TQDM_")  # override defaults via env vars
+        def __init__(self, iterable=None, desc=None, total=None, leave=True,
+                     file=None, ncols=None, mininterval=0.1,
+                     maxinterval=10.0, miniters=None, ascii=None, disable=False,
+                     unit='it', unit_scale=False, dynamic_ncols=False,
+                     smoothing=0.3, bar_format=None, initial=0, position=None,
+                     postfix=None, unit_divisor=1000, write_bytes=False,
+                     lock_args=None, nrows=None, colour=None, delay=0):
+
+Parameters
+~~~~~~~~~~
+
+* iterable : iterable, optional
+    Iterable to decorate with a progressbar.
+    Leave blank to manually manage the updates.
+* desc : str, optional
+    Prefix for the progressbar.
+* total : int or float, optional
+    The number of expected iterations. If unspecified,
+    len(iterable) is used if possible. If float("inf") or as a last
+    resort, only basic progress statistics are displayed
+    (no ETA, no progressbar).
+    If ``gui`` is True and this parameter needs subsequent updating,
+    specify an initial arbitrary large positive number,
+    e.g. 9e9.
+* leave : bool, optional
+    If [default: True], keeps all traces of the progressbar
+    upon termination of iteration.
+    If ``None``, will leave only if ``position`` is ``0``.
+* file : ``io.TextIOWrapper`` or ``io.StringIO``, optional
+    Specifies where to output the progress messages
+    (default: sys.stderr). Uses ``file.write(str)`` and ``file.flush()``
+    methods. For encoding, see ``write_bytes``.
+* ncols : int, optional
+    The width of the entire output message. If specified,
+    dynamically resizes the progressbar to stay within this bound.
+    If unspecified, attempts to use environment width. The
+    fallback is a meter width of 10 and no limit for the counter and
+    statistics. If 0, will not print any meter (only stats).
+* mininterval : float, optional
+    Minimum progress display update interval [default: 0.1] seconds.
+* maxinterval : float, optional
+    Maximum progress display update interval [default: 10] seconds.
+    Automatically adjusts ``miniters`` to correspond to ``mininterval``
+    after long display update lag. Only works if ``dynamic_miniters``
+    or monitor thread is enabled.
+* miniters : int or float, optional
+    Minimum progress display update interval, in iterations.
+    If 0 and ``dynamic_miniters``, will automatically adjust to equal
+    ``mininterval`` (more CPU efficient, good for tight loops).
+    If > 0, will skip display of specified number of iterations.
+    Tweak this and ``mininterval`` to get very efficient loops.
+    If your progress is erratic with both fast and slow iterations
+    (network, skipping items, etc.) you should set miniters=1.
+* ascii : bool or str, optional + If unspecified or False, use unicode (smooth blocks) to fill + the meter. The fallback is to use ASCII characters " 123456789#". +* disable : bool, optional + Whether to disable the entire progressbar wrapper + [default: False]. If set to None, disable on non-TTY. +* unit : str, optional + String that will be used to define the unit of each iteration + [default: it]. +* unit_scale : bool or int or float, optional + If 1 or True, the number of iterations will be reduced/scaled + automatically and a metric prefix following the + International System of Units standard will be added + (kilo, mega, etc.) [default: False]. If any other non-zero + number, will scale ``total`` and ``n``. +* dynamic_ncols : bool, optional + If set, constantly alters ``ncols`` and ``nrows`` to the + environment (allowing for window resizes) [default: False]. +* smoothing : float, optional + Exponential moving average smoothing factor for speed estimates + (ignored in GUI mode). Ranges from 0 (average speed) to 1 + (current/instantaneous speed) [default: 0.3]. +* bar_format : str, optional + Specify a custom bar string formatting. May impact performance. + [default: '{l_bar}{bar}{r_bar}'], where + l_bar='{desc}: {percentage:3.0f}%|' and + r_bar='| {n_fmt}/{total_fmt} [{elapsed}<{remaining}, ' + '{rate_fmt}{postfix}]' + Possible vars: l_bar, bar, r_bar, n, n_fmt, total, total_fmt, + percentage, elapsed, elapsed_s, ncols, nrows, desc, unit, + rate, rate_fmt, rate_noinv, rate_noinv_fmt, + rate_inv, rate_inv_fmt, postfix, unit_divisor, + remaining, remaining_s, eta. + Note that a trailing ": " is automatically removed after {desc} + if the latter is empty. +* initial : int or float, optional + The initial counter value. Useful when restarting a progress + bar [default: 0]. If using float, consider specifying ``{n:.3f}`` + or similar in ``bar_format``, or specifying ``unit_scale``. +* position : int, optional + Specify the line offset to print this bar (starting from 0) + Automatic if unspecified. + Useful to manage multiple bars at once (eg, from threads). +* postfix : dict or ``*``, optional + Specify additional stats to display at the end of the bar. + Calls ``set_postfix(**postfix)`` if possible (dict). +* unit_divisor : float, optional + [default: 1000], ignored unless ``unit_scale`` is True. +* write_bytes : bool, optional + Whether to write bytes. If (default: False) will write unicode. +* lock_args : tuple, optional + Passed to ``refresh`` for intermediate output + (initialisation, iterating, and updating). +* nrows : int, optional + The screen height. If specified, hides nested bars outside this + bound. If unspecified, attempts to use environment height. + The fallback is 20. +* colour : str, optional + Bar colour (e.g. 'green', '#00ff00'). +* delay : float, optional + Don't display until [default: 0] seconds have elapsed. + +Extra CLI Options +~~~~~~~~~~~~~~~~~ + +* delim : chr, optional + Delimiting character [default: '\n']. Use '\0' for null. + N.B.: on Windows systems, Python converts '\n' to '\r\n'. +* buf_size : int, optional + String buffer size in bytes [default: 256] + used when ``delim`` is specified. +* bytes : bool, optional + If true, will count bytes, ignore ``delim``, and default + ``unit_scale`` to True, ``unit_divisor`` to 1024, and ``unit`` to 'B'. +* tee : bool, optional + If true, passes ``stdin`` to both ``stderr`` and ``stdout``. +* update : bool, optional + If true, will treat input as newly elapsed iterations, + i.e. numbers to pass to ``update()``. 
Note that this is slow + (~2e5 it/s) since every input must be decoded as a number. +* update_to : bool, optional + If true, will treat input as total elapsed iterations, + i.e. numbers to assign to ``self.n``. Note that this is slow + (~2e5 it/s) since every input must be decoded as a number. +* null : bool, optional + If true, will discard input (no stdout). +* manpath : str, optional + Directory in which to install tqdm man pages. +* comppath : str, optional + Directory in which to place tqdm completion. +* log : str, optional + CRITICAL|FATAL|ERROR|WARN(ING)|[default: 'INFO']|DEBUG|NOTSET. + +Returns +~~~~~~~ + +* out : decorated iterator. + +.. code:: python + + class tqdm(): + def update(self, n=1): + """ + Manually update the progress bar, useful for streams + such as reading files. + E.g.: + >>> t = tqdm(total=filesize) # Initialise + >>> for current_buffer in stream: + ... ... + ... t.update(len(current_buffer)) + >>> t.close() + The last line is highly recommended, but possibly not necessary if + ``t.update()`` will be called in such a way that ``filesize`` will be + exactly reached and printed. + + Parameters + ---------- + n : int or float, optional + Increment to add to the internal counter of iterations + [default: 1]. If using float, consider specifying ``{n:.3f}`` + or similar in ``bar_format``, or specifying ``unit_scale``. + + Returns + ------- + out : bool or None + True if a ``display()`` was triggered. + """ + + def close(self): + """Cleanup and (if leave=False) close the progressbar.""" + + def clear(self, nomove=False): + """Clear current bar display.""" + + def refresh(self): + """ + Force refresh the display of this bar. + + Parameters + ---------- + nolock : bool, optional + If ``True``, does not lock. + If [default: ``False``]: calls ``acquire()`` on internal lock. + lock_args : tuple, optional + Passed to internal lock's ``acquire()``. + If specified, will only ``display()`` if ``acquire()`` returns ``True``. + """ + + def unpause(self): + """Restart tqdm timer from last print time.""" + + def reset(self, total=None): + """ + Resets to 0 iterations for repeated use. + + Consider combining with ``leave=True``. + + Parameters + ---------- + total : int or float, optional. Total to use for the new bar. + """ + + def set_description(self, desc=None, refresh=True): + """ + Set/modify description of the progress bar. + + Parameters + ---------- + desc : str, optional + refresh : bool, optional + Forces refresh [default: True]. + """ + + def set_postfix(self, ordered_dict=None, refresh=True, **tqdm_kwargs): + """ + Set/modify postfix (additional stats) + with automatic formatting based on datatype. + + Parameters + ---------- + ordered_dict : dict or OrderedDict, optional + refresh : bool, optional + Forces refresh [default: True]. + kwargs : dict, optional + """ + + @classmethod + def write(cls, s, file=sys.stdout, end="\n"): + """Print a message via tqdm (without overlap with bars).""" + + @property + def format_dict(self): + """Public API for read-only member access.""" + + def display(self, msg=None, pos=None): + """ + Use ``self.sp`` to display ``msg`` in the specified ``pos``. + + Consider overloading this function when inheriting to use e.g.: + ``self.some_frontend(**self.format_dict)`` instead of ``self.sp``. + + Parameters + ---------- + msg : str, optional. What to display (default: ``repr(self)``). + pos : int, optional. Position to ``moveto`` + (default: ``abs(self.pos)``). 
+
+            """
+
+        @classmethod
+        @contextmanager
+        def wrapattr(cls, stream, method, total=None, bytes=True, **tqdm_kwargs):
+            """
+            stream  : file-like object.
+            method  : str, "read" or "write". The result of ``read()`` and
+                      the first argument of ``write()`` should have a ``len()``.
+
+            >>> with tqdm.wrapattr(file_obj, "read", total=file_obj.size) as fobj:
+            ...     while True:
+            ...         chunk = fobj.read(chunk_size)
+            ...         if not chunk:
+            ...             break
+            """
+
+        @classmethod
+        def pandas(cls, *targs, **tqdm_kwargs):
+            """Registers the current `tqdm` class with `pandas`."""
+
+    def trange(*args, **tqdm_kwargs):
+        """Shortcut for `tqdm(range(*args), **tqdm_kwargs)`."""
+
+Convenience Functions
+~~~~~~~~~~~~~~~~~~~~~
+
+.. code:: python
+
+    def tqdm.contrib.tenumerate(iterable, start=0, total=None,
+                                tqdm_class=tqdm.auto.tqdm, **tqdm_kwargs):
+        """Equivalent of `numpy.ndenumerate` or builtin `enumerate`."""
+
+    def tqdm.contrib.tzip(iter1, *iter2plus, **tqdm_kwargs):
+        """Equivalent of builtin `zip`."""
+
+    def tqdm.contrib.tmap(function, *sequences, **tqdm_kwargs):
+        """Equivalent of builtin `map`."""
+
+Submodules
+~~~~~~~~~~
+
+.. code:: python
+
+    class tqdm.notebook.tqdm(tqdm.tqdm):
+        """IPython/Jupyter Notebook widget."""
+
+    class tqdm.auto.tqdm(tqdm.tqdm):
+        """Automatically chooses between `tqdm.notebook` and `tqdm.tqdm`."""
+
+    class tqdm.asyncio.tqdm(tqdm.tqdm):
+        """Asynchronous version."""
+        @classmethod
+        def as_completed(cls, fs, *, loop=None, timeout=None, total=None,
+                         **tqdm_kwargs):
+            """Wrapper for `asyncio.as_completed`."""
+
+    class tqdm.gui.tqdm(tqdm.tqdm):
+        """Matplotlib GUI version."""
+
+    class tqdm.tk.tqdm(tqdm.tqdm):
+        """Tkinter GUI version."""
+
+    class tqdm.rich.tqdm(tqdm.tqdm):
+        """`rich.progress` version."""
+
+    class tqdm.keras.TqdmCallback(keras.callbacks.Callback):
+        """Keras callback for epoch and batch progress."""
+
+    class tqdm.dask.TqdmCallback(dask.callbacks.Callback):
+        """Dask callback for task progress."""
+
+
+``contrib``
++++++++++++
+
+The ``tqdm.contrib`` package also contains experimental modules:
+
+- ``tqdm.contrib.itertools``: Thin wrappers around ``itertools``
+- ``tqdm.contrib.concurrent``: Thin wrappers around ``concurrent.futures``
+- ``tqdm.contrib.slack``: Posts to `Slack `__ bots
+- ``tqdm.contrib.discord``: Posts to `Discord `__ bots
+- ``tqdm.contrib.telegram``: Posts to `Telegram `__ bots
+- ``tqdm.contrib.bells``: Automagically enables all optional features
+
+  * ``auto``, ``pandas``, ``slack``, ``discord``, ``telegram``
+
+Examples and Advanced Usage
+---------------------------
+
+- See the `examples `__
+  folder;
+- import the module and run ``help()``;
+- consult the `wiki `__;
+
+  * this has an
+    `excellent article `__
+    on how to make a **great** progressbar;
+
+- check out the `slides from PyData London `__, or
+- run the |binder-demo|.
+
+Description and additional stats
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Custom information can be displayed and updated dynamically on ``tqdm`` bars
+with the ``desc`` and ``postfix`` arguments:
+
+.. 
code:: python + + from tqdm import tqdm, trange + from random import random, randint + from time import sleep + + with trange(10) as t: + for i in t: + # Description will be displayed on the left + t.set_description('GEN %i' % i) + # Postfix will be displayed on the right, + # formatted automatically based on argument's datatype + t.set_postfix(loss=random(), gen=randint(1,999), str='h', + lst=[1, 2]) + sleep(0.1) + + with tqdm(total=10, bar_format="{postfix[0]} {postfix[1][value]:>8.2g}", + postfix=["Batch", {"value": 0}]) as t: + for i in range(10): + sleep(0.1) + t.postfix[1]["value"] = i / 2 + t.update() + +Points to remember when using ``{postfix[...]}`` in the ``bar_format`` string: + +- ``postfix`` also needs to be passed as an initial argument in a compatible + format, and +- ``postfix`` will be auto-converted to a string if it is a ``dict``-like + object. To prevent this behaviour, insert an extra item into the dictionary + where the key is not a string. + +Additional ``bar_format`` parameters may also be defined by overriding +``format_dict``, and the bar itself may be modified using ``ascii``: + +.. code:: python + + from tqdm import tqdm + class TqdmExtraFormat(tqdm): + """Provides a `total_time` format parameter""" + @property + def format_dict(self): + d = super().format_dict + total_time = d["elapsed"] * (d["total"] or 0) / max(d["n"], 1) + d.update(total_time=self.format_interval(total_time) + " in total") + return d + + for i in TqdmExtraFormat( + range(9), ascii=" .oO0", + bar_format="{total_time}: {percentage:.0f}%|{bar}{r_bar}"): + if i == 4: + break + +.. code:: + + 00:00 in total: 44%|0000. | 4/9 [00:00<00:00, 962.93it/s] + +Note that ``{bar}`` also supports a format specifier ``[width][type]``. + +- ``width`` + + * unspecified (default): automatic to fill ``ncols`` + * ``int >= 0``: fixed width overriding ``ncols`` logic + * ``int < 0``: subtract from the automatic default + +- ``type`` + + * ``a``: ascii (``ascii=True`` override) + * ``u``: unicode (``ascii=False`` override) + * ``b``: blank (``ascii=" "`` override) + +This means a fixed bar with right-justified text may be created by using: +``bar_format="{l_bar}{bar:10}|{bar:-10b}right-justified"`` + +Nested progress bars +~~~~~~~~~~~~~~~~~~~~ + +``tqdm`` supports nested progress bars. Here's an example: + +.. code:: python + + from tqdm.auto import trange + from time import sleep + + for i in trange(4, desc='1st loop'): + for j in trange(5, desc='2nd loop'): + for k in trange(50, desc='3rd loop', leave=False): + sleep(0.01) + +For manual control over positioning (e.g. for multi-processing use), +you may specify ``position=n`` where ``n=0`` for the outermost bar, +``n=1`` for the next, and so on. +However, it's best to check if ``tqdm`` can work without manual ``position`` +first. + +.. code:: python + + from time import sleep + from tqdm import trange, tqdm + from multiprocessing import Pool, RLock, freeze_support + + L = list(range(9)) + + def progresser(n): + interval = 0.001 / (n + 2) + total = 5000 + text = f"#{n}, est. {interval * total:<04.2}s" + for _ in trange(total, desc=text, position=n): + sleep(interval) + + if __name__ == '__main__': + freeze_support() # for Windows support + tqdm.set_lock(RLock()) # for managing output contention + p = Pool(initializer=tqdm.set_lock, initargs=(tqdm.get_lock(),)) + p.map(progresser, L) + +Note that in Python 3, ``tqdm.write`` is thread-safe: + +.. 
code:: python
+
+    from time import sleep
+    from tqdm import tqdm, trange
+    from concurrent.futures import ThreadPoolExecutor
+
+    L = list(range(9))
+
+    def progresser(n):
+        interval = 0.001 / (n + 2)
+        total = 5000
+        text = f"#{n}, est. {interval * total:<04.2}s"
+        for _ in trange(total, desc=text):
+            sleep(interval)
+        if n == 6:
+            tqdm.write("n == 6 completed.")
+            tqdm.write("`tqdm.write()` is thread-safe in py3!")
+
+    if __name__ == '__main__':
+        with ThreadPoolExecutor() as p:
+            p.map(progresser, L)
+
+Hooks and callbacks
+~~~~~~~~~~~~~~~~~~~
+
+``tqdm`` can easily support callbacks/hooks and manual updates.
+Here's an example with ``urllib``:
+
+**``urllib.urlretrieve`` documentation**
+
+    | [...]
+    | If present, the hook function will be called once
+    | on establishment of the network connection and once after each block read
+    | thereafter. The hook will be passed three arguments; a count of blocks
+    | transferred so far, a block size in bytes, and the total size of the file.
+    | [...]
+
+.. code:: python
+
+    import urllib, os
+    from tqdm import tqdm
+    urllib = getattr(urllib, 'request', urllib)
+
+    class TqdmUpTo(tqdm):
+        """Provides `update_to(n)` which uses `tqdm.update(delta_n)`."""
+        def update_to(self, b=1, bsize=1, tsize=None):
+            """
+            b  : int, optional
+                Number of blocks transferred so far [default: 1].
+            bsize  : int, optional
+                Size of each block (in tqdm units) [default: 1].
+            tsize  : int, optional
+                Total size (in tqdm units). If [default: None] remains unchanged.
+            """
+            if tsize is not None:
+                self.total = tsize
+            return self.update(b * bsize - self.n)  # also sets self.n = b * bsize
+
+    eg_link = "https://caspersci.uk.to/matryoshka.zip"
+    with TqdmUpTo(unit='B', unit_scale=True, unit_divisor=1024, miniters=1,
+                  desc=eg_link.split('/')[-1]) as t:  # all optional kwargs
+        urllib.urlretrieve(eg_link, filename=os.devnull,
+                           reporthook=t.update_to, data=None)
+        t.total = t.n
+
+Inspired by `twine#242 `__.
+Functional alternative in
+`examples/tqdm_wget.py `__.
+
+It is recommended to use ``miniters=1`` whenever there are potentially
+large differences in iteration speed (e.g. downloading a file over
+a patchy connection).
+
+**Wrapping read/write methods**
+
+To measure throughput through a file-like object's ``read`` or ``write``
+methods, use ``CallbackIOWrapper``:
+
+.. code:: python
+
+    from tqdm.auto import tqdm
+    from tqdm.utils import CallbackIOWrapper
+
+    with tqdm(total=file_obj.size,
+              unit='B', unit_scale=True, unit_divisor=1024) as t:
+        fobj = CallbackIOWrapper(t.update, file_obj, "read")
+        while True:
+            chunk = fobj.read(chunk_size)
+            if not chunk:
+                break
+        t.reset()
+        # ... continue to use `t` for something else
+
+Alternatively, use the even simpler ``wrapattr`` convenience function,
+which would condense both the ``urllib`` and ``CallbackIOWrapper`` examples
+down to:
+
+.. code:: python
+
+    import urllib, os
+    from tqdm import tqdm
+
+    eg_link = "https://caspersci.uk.to/matryoshka.zip"
+    response = getattr(urllib, 'request', urllib).urlopen(eg_link)
+    with tqdm.wrapattr(open(os.devnull, "wb"), "write",
+                       miniters=1, desc=eg_link.split('/')[-1],
+                       total=getattr(response, 'length', None)) as fout:
+        for chunk in response:
+            fout.write(chunk)
+
+The ``requests`` equivalent is nearly identical:
+
+.. 
code:: python + + import requests, os + from tqdm import tqdm + + eg_link = "https://caspersci.uk.to/matryoshka.zip" + response = requests.get(eg_link, stream=True) + with tqdm.wrapattr(open(os.devnull, "wb"), "write", + miniters=1, desc=eg_link.split('/')[-1], + total=int(response.headers.get('content-length', 0))) as fout: + for chunk in response.iter_content(chunk_size=4096): + fout.write(chunk) + +**Custom callback** + +``tqdm`` is known for intelligently skipping unnecessary displays. To make a +custom callback take advantage of this, simply use the return value of +``update()``. This is set to ``True`` if a ``display()`` was triggered. + +.. code:: python + + from tqdm.auto import tqdm as std_tqdm + + def external_callback(*args, **kwargs): + ... + + class TqdmExt(std_tqdm): + def update(self, n=1): + displayed = super().update(n) + if displayed: + external_callback(**self.format_dict) + return displayed + +``asyncio`` +~~~~~~~~~~~ + +Note that ``break`` isn't currently caught by asynchronous iterators. +This means that ``tqdm`` cannot clean up after itself in this case: + +.. code:: python + + from tqdm.asyncio import tqdm + + async for i in tqdm(range(9)): + if i == 2: + break + +Instead, either call ``pbar.close()`` manually or use the context manager syntax: + +.. code:: python + + from tqdm.asyncio import tqdm + + with tqdm(range(9)) as pbar: + async for i in pbar: + if i == 2: + break + +Pandas Integration +~~~~~~~~~~~~~~~~~~ + +Due to popular demand we've added support for ``pandas`` -- here's an example +for ``DataFrame.progress_apply`` and ``DataFrameGroupBy.progress_apply``: + +.. code:: python + + import pandas as pd + import numpy as np + from tqdm import tqdm + + df = pd.DataFrame(np.random.randint(0, 100, (100000, 6))) + + # Register `pandas.progress_apply` and `pandas.Series.map_apply` with `tqdm` + # (can use `tqdm.gui.tqdm`, `tqdm.notebook.tqdm`, optional kwargs, etc.) + tqdm.pandas(desc="my bar!") + + # Now you can use `progress_apply` instead of `apply` + # and `progress_map` instead of `map` + df.progress_apply(lambda x: x**2) + # can also groupby: + # df.groupby(0).progress_apply(lambda x: x**2) + +In case you're interested in how this works (and how to modify it for your +own callbacks), see the +`examples `__ +folder or import the module and run ``help()``. + +Keras Integration +~~~~~~~~~~~~~~~~~ + +A ``keras`` callback is also available: + +.. code:: python + + from tqdm.keras import TqdmCallback + + ... + + model.fit(..., verbose=0, callbacks=[TqdmCallback()]) + +Dask Integration +~~~~~~~~~~~~~~~~ + +A ``dask`` callback is also available: + +.. code:: python + + from tqdm.dask import TqdmCallback + + with TqdmCallback(desc="compute"): + ... + arr.compute() + + # or use callback globally + cb = TqdmCallback(desc="global") + cb.register() + arr.compute() + +IPython/Jupyter Integration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +IPython/Jupyter is supported via the ``tqdm.notebook`` submodule: + +.. code:: python + + from tqdm.notebook import trange, tqdm + from time import sleep + + for i in trange(3, desc='1st loop'): + for j in tqdm(range(100), desc='2nd loop'): + sleep(0.01) + +In addition to ``tqdm`` features, the submodule provides a native Jupyter +widget (compatible with IPython v1-v4 and Jupyter), fully working nested bars +and colour hints (blue: normal, green: completed, red: error/interrupt, +light blue: no ETA); as demonstrated below. 
+ +|Screenshot-Jupyter1| +|Screenshot-Jupyter2| +|Screenshot-Jupyter3| + +The ``notebook`` version supports percentage or pixels for overall width +(e.g.: ``ncols='100%'`` or ``ncols='480px'``). + +It is also possible to let ``tqdm`` automatically choose between +console or notebook versions by using the ``autonotebook`` submodule: + +.. code:: python + + from tqdm.autonotebook import tqdm + tqdm.pandas() + +Note that this will issue a ``TqdmExperimentalWarning`` if run in a notebook +since it is not meant to be possible to distinguish between ``jupyter notebook`` +and ``jupyter console``. Use ``auto`` instead of ``autonotebook`` to suppress +this warning. + +Note that notebooks will display the bar in the cell where it was created. +This may be a different cell from the one where it is used. +If this is not desired, either + +- delay the creation of the bar to the cell where it must be displayed, or +- create the bar with ``display=False``, and in a later cell call + ``display(bar.container)``: + +.. code:: python + + from tqdm.notebook import tqdm + pbar = tqdm(..., display=False) + +.. code:: python + + # different cell + display(pbar.container) + +The ``keras`` callback has a ``display()`` method which can be used likewise: + +.. code:: python + + from tqdm.keras import TqdmCallback + cbk = TqdmCallback(display=False) + +.. code:: python + + # different cell + cbk.display() + model.fit(..., verbose=0, callbacks=[cbk]) + +Another possibility is to have a single bar (near the top of the notebook) +which is constantly re-used (using ``reset()`` rather than ``close()``). +For this reason, the notebook version (unlike the CLI version) does not +automatically call ``close()`` upon ``Exception``. + +.. code:: python + + from tqdm.notebook import tqdm + pbar = tqdm() + +.. code:: python + + # different cell + iterable = range(100) + pbar.reset(total=len(iterable)) # initialise with new `total` + for i in iterable: + pbar.update() + pbar.refresh() # force print final status but don't `close()` + +Custom Integration +~~~~~~~~~~~~~~~~~~ + +To change the default arguments (such as making ``dynamic_ncols=True``), +simply use built-in Python magic: + +.. code:: python + + from functools import partial + from tqdm import tqdm as std_tqdm + tqdm = partial(std_tqdm, dynamic_ncols=True) + +For further customisation, +``tqdm`` may be inherited from to create custom callbacks (as with the +``TqdmUpTo`` example `above <#hooks-and-callbacks>`__) or for custom frontends +(e.g. GUIs such as notebook or plotting packages). In the latter case: + +1. ``def __init__()`` to call ``super().__init__(..., gui=True)`` to disable + terminal ``status_printer`` creation. +2. Redefine: ``close()``, ``clear()``, ``display()``. + +Consider overloading ``display()`` to use e.g. +``self.frontend(**self.format_dict)`` instead of ``self.sp(repr(self))``. + +Some submodule examples of inheritance: + +- `tqdm/notebook.py `__ +- `tqdm/gui.py `__ +- `tqdm/tk.py `__ +- `tqdm/contrib/slack.py `__ +- `tqdm/contrib/discord.py `__ +- `tqdm/contrib/telegram.py `__ + +Dynamic Monitor/Meter +~~~~~~~~~~~~~~~~~~~~~ + +You can use a ``tqdm`` as a meter which is not monotonically increasing. +This could be because ``n`` decreases (e.g. a CPU usage monitor) or ``total`` +changes. + +One example would be recursively searching for files. The ``total`` is the +number of objects found so far, while ``n`` is the number of those objects which +are files (rather than folders): + +.. 
code:: python
+
+    from tqdm import tqdm
+    import os.path
+
+    def find_files_recursively(path, show_progress=True):
+        files = []
+        # total=1 assumes `path` is a file
+        t = tqdm(total=1, unit="file", disable=not show_progress)
+        if not os.path.exists(path):
+            raise IOError("Cannot find: " + path)
+
+        def append_found_file(f):
+            files.append(f)
+            t.update()
+
+        def list_found_dir(path):
+            """returns os.listdir(path) assuming os.path.isdir(path)"""
+            listing = os.listdir(path)
+            # subtract 1 since a "file" we found was actually this directory
+            t.total += len(listing) - 1
+            # fancy way to give info without forcing a refresh
+            t.set_postfix(dir=path[-10:], refresh=False)
+            t.update(0)  # may trigger a refresh
+            return listing
+
+        def recursively_search(path):
+            if os.path.isdir(path):
+                for f in list_found_dir(path):
+                    recursively_search(os.path.join(path, f))
+            else:
+                append_found_file(path)
+
+        recursively_search(path)
+        t.set_postfix(dir=path)
+        t.close()
+        return files
+
+Using ``update(0)`` is a handy way to let ``tqdm`` decide when to trigger a
+display refresh to avoid console spamming.
+
+Writing messages
+~~~~~~~~~~~~~~~~
+
+This is a work in progress (see
+`#737 `__).
+
+Since ``tqdm`` uses a simple printing mechanism to display progress bars,
+you should not write any message in the terminal using ``print()`` while
+a progressbar is open.
+
+To write messages in the terminal without any collision with ``tqdm`` bar
+display, a ``.write()`` method is provided:
+
+.. code:: python
+
+    from tqdm.auto import tqdm, trange
+    from time import sleep
+
+    bar = trange(10)
+    for i in bar:
+        # Print using tqdm class method .write()
+        sleep(0.1)
+        if not (i % 3):
+            tqdm.write("Done task %i" % i)
+    # Can also use bar.write()
+
+By default, this will print to standard output ``sys.stdout``, but you can
+specify any file-like object using the ``file`` argument. For example, this
+can be used to redirect messages to a log file or class.
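+
+A minimal sketch of such a redirect target (``LogFile`` is a hypothetical
+helper, not part of ``tqdm``):
+
+.. code:: python
+
+    import logging
+    from tqdm import tqdm
+
+    logging.basicConfig(filename="progress.log", level=logging.INFO)
+
+    class LogFile:
+        """File-like object routing tqdm.write() messages to `logging`."""
+        def write(self, s):
+            if s.strip():  # skip the bare newlines tqdm may emit
+                logging.info(s.strip())
+
+        def flush(self):
+            pass
+
+    tqdm.write("Done task 3", file=LogFile())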
+
+Redirecting writing
+~~~~~~~~~~~~~~~~~~~
+
+If using a library that can print messages to the console, editing the library
+by replacing ``print()`` with ``tqdm.write()`` may not be desirable.
+In that case, redirecting ``sys.stdout`` to ``tqdm.write()`` is an option.
+
+To redirect ``sys.stdout``, create a file-like class that will write
+any input string to ``tqdm.write()``, and supply the arguments
+``file=sys.stdout, dynamic_ncols=True``.
+
+A reusable canonical example is given below:
+
+.. code:: python
+
+    from time import sleep
+    import contextlib
+    import sys
+    from tqdm import tqdm
+    from tqdm.contrib import DummyTqdmFile
+
+
+    @contextlib.contextmanager
+    def std_out_err_redirect_tqdm():
+        orig_out_err = sys.stdout, sys.stderr
+        try:
+            sys.stdout, sys.stderr = map(DummyTqdmFile, orig_out_err)
+            yield orig_out_err[0]
+        # Relay exceptions
+        except Exception as exc:
+            raise exc
+        # Always restore sys.stdout/err if necessary
+        finally:
+            sys.stdout, sys.stderr = orig_out_err
+
+    def some_fun(i):
+        print("Fee, fi, fo,".split()[i])
+
+    # Redirect stdout to tqdm.write() (don't forget the `as orig_stdout`)
+    with std_out_err_redirect_tqdm() as orig_stdout:
+        # tqdm needs the original stdout
+        # and dynamic_ncols=True to autodetect console width
+        for i in tqdm(range(3), file=orig_stdout, dynamic_ncols=True):
+            sleep(.5)
+            some_fun(i)
+
+    # After the `with`, printing is restored
+    print("Done!")
+
+Redirecting ``logging``
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Similar to ``sys.stdout``/``sys.stderr`` as detailed above, console ``logging``
+may also be redirected to ``tqdm.write()``.
+
+Warning: if also redirecting ``sys.stdout``/``sys.stderr``, make sure to
+redirect ``logging`` first if needed.
+
+Helper methods are available in ``tqdm.contrib.logging``. For example:
+
+.. code:: python
+
+    import logging
+    from tqdm import trange
+    from tqdm.contrib.logging import logging_redirect_tqdm
+
+    LOG = logging.getLogger(__name__)
+
+    if __name__ == '__main__':
+        logging.basicConfig(level=logging.INFO)
+        with logging_redirect_tqdm():
+            for i in trange(9):
+                if i == 4:
+                    LOG.info("console logging redirected to `tqdm.write()`")
+        # logging restored
+
+Monitoring thread, intervals and miniters
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+``tqdm`` implements a few tricks to increase efficiency and reduce overhead.
+
+- Avoid unnecessary frequent bar refreshing: ``mininterval`` defines how long
+  to wait between each refresh. ``tqdm`` always gets updated in the background,
+  but it will display only every ``mininterval``.
+- Reduce number of calls to check system clock/time.
+- ``mininterval`` is more intuitive to configure than ``miniters``.
+  A clever adjustment system ``dynamic_miniters`` will automatically adjust
+  ``miniters`` to the amount of iterations that fit into time ``mininterval``.
+  Essentially, ``tqdm`` will check if it's time to print without actually
+  checking time. This behaviour can still be bypassed by manually setting
+  ``miniters``.
+
+However, consider a case with a combination of fast and slow iterations.
+After a few fast iterations, ``dynamic_miniters`` will set ``miniters`` to a
+large number. When iteration rate subsequently slows, ``miniters`` will
+remain large and thus reduce display update frequency. To address this:
+
+- ``maxinterval`` defines the maximum time between display refreshes.
+  A concurrent monitoring thread checks for overdue updates and forces one
+  where necessary.
+
+The monitoring thread should not have a noticeable overhead, and guarantees
+updates at least every 10 seconds by default.
+This value can be directly changed by setting the ``monitor_interval`` of
+any ``tqdm`` instance (i.e. ``t = tqdm.tqdm(...); t.monitor_interval = 2``).
+The monitor thread may be disabled application-wide by setting
+``tqdm.tqdm.monitor_interval = 0`` before instantiation of any ``tqdm`` bar.
+
+
+Merch
+-----
+
+You can buy `tqdm branded merch `__ now!
+ +Contributions +------------- + +|GitHub-Commits| |GitHub-Issues| |GitHub-PRs| |OpenHub-Status| |GitHub-Contributions| |CII Best Practices| + +All source code is hosted on `GitHub `__. +Contributions are welcome. + +See the +`CONTRIBUTING `__ +file for more information. + +Developers who have made significant contributions, ranked by *SLoC* +(surviving lines of code, +`git fame `__ ``-wMC --excl '\.(png|gif|jpg)$'``), +are: + +==================== ======================================================== ==== ================================ +Name ID SLoC Notes +==================== ======================================================== ==== ================================ +Casper da Costa-Luis `casperdcl `__ ~80% primary maintainer |Gift-Casper| +Stephen Larroque `lrq3000 `__ ~9% team member +Martin Zugnoni `martinzugnoni `__ ~3% +Daniel Ecer `de-code `__ ~2% +Richard Sheridan `richardsheridan `__ ~1% +Guangshuo Chen `chengs `__ ~1% +Helio Machado `0x2b3bfa0 `__ ~1% +Kyle Altendorf `altendky `__ <1% +Noam Yorav-Raphael `noamraph `__ <1% original author +Matthew Stevens `mjstevens777 `__ <1% +Hadrien Mary `hadim `__ <1% team member +Mikhail Korobov `kmike `__ <1% team member +==================== ======================================================== ==== ================================ + +Ports to Other Languages +~~~~~~~~~~~~~~~~~~~~~~~~ + +A list is available on +`this wiki page `__. + + +LICENCE +------- + +Open Source (OSI approved): |LICENCE| + +Citation information: |DOI| + +|README-Hits| (Since 19 May 2016) + +.. |Logo| image:: https://tqdm.github.io/img/logo.gif +.. |Screenshot| image:: https://tqdm.github.io/img/tqdm.gif +.. |Video| image:: https://tqdm.github.io/img/video.jpg + :target: https://tqdm.github.io/video +.. |Slides| image:: https://tqdm.github.io/img/slides.jpg + :target: https://tqdm.github.io/PyData2019/slides.html +.. |Merch| image:: https://tqdm.github.io/img/merch.jpg + :target: https://tqdm.github.io/merch +.. |Build-Status| image:: https://img.shields.io/github/actions/workflow/status/tqdm/tqdm/test.yml?branch=master&label=tqdm&logo=GitHub + :target: https://github.com/tqdm/tqdm/actions/workflows/test.yml +.. |Coverage-Status| image:: https://img.shields.io/coveralls/github/tqdm/tqdm/master?logo=coveralls + :target: https://coveralls.io/github/tqdm/tqdm +.. |Branch-Coverage-Status| image:: https://codecov.io/gh/tqdm/tqdm/branch/master/graph/badge.svg + :target: https://codecov.io/gh/tqdm/tqdm +.. |Codacy-Grade| image:: https://app.codacy.com/project/badge/Grade/3f965571598f44549c7818f29cdcf177 + :target: https://www.codacy.com/gh/tqdm/tqdm/dashboard +.. |CII Best Practices| image:: https://bestpractices.coreinfrastructure.org/projects/3264/badge + :target: https://bestpractices.coreinfrastructure.org/projects/3264 +.. |GitHub-Status| image:: https://img.shields.io/github/tag/tqdm/tqdm.svg?maxAge=86400&logo=github&logoColor=white + :target: https://github.com/tqdm/tqdm/releases +.. |GitHub-Forks| image:: https://img.shields.io/github/forks/tqdm/tqdm.svg?logo=github&logoColor=white + :target: https://github.com/tqdm/tqdm/network +.. |GitHub-Stars| image:: https://img.shields.io/github/stars/tqdm/tqdm.svg?logo=github&logoColor=white + :target: https://github.com/tqdm/tqdm/stargazers +.. |GitHub-Commits| image:: https://img.shields.io/github/commit-activity/y/tqdm/tqdm.svg?logo=git&logoColor=white + :target: https://github.com/tqdm/tqdm/graphs/commit-activity +.. 
|GitHub-Issues| image:: https://img.shields.io/github/issues-closed/tqdm/tqdm.svg?logo=github&logoColor=white + :target: https://github.com/tqdm/tqdm/issues?q= +.. |GitHub-PRs| image:: https://img.shields.io/github/issues-pr-closed/tqdm/tqdm.svg?logo=github&logoColor=white + :target: https://github.com/tqdm/tqdm/pulls +.. |GitHub-Contributions| image:: https://img.shields.io/github/contributors/tqdm/tqdm.svg?logo=github&logoColor=white + :target: https://github.com/tqdm/tqdm/graphs/contributors +.. |GitHub-Updated| image:: https://img.shields.io/github/last-commit/tqdm/tqdm/master.svg?logo=github&logoColor=white&label=pushed + :target: https://github.com/tqdm/tqdm/pulse +.. |Gift-Casper| image:: https://img.shields.io/badge/dynamic/json.svg?color=ff69b4&label=gifts%20received&prefix=%C2%A3&query=%24..sum&url=https%3A%2F%2Fcaspersci.uk.to%2Fgifts.json + :target: https://cdcl.ml/sponsor +.. |Versions| image:: https://img.shields.io/pypi/v/tqdm.svg + :target: https://tqdm.github.io/releases +.. |PyPI-Downloads| image:: https://img.shields.io/pypi/dm/tqdm.svg?label=pypi%20downloads&logo=PyPI&logoColor=white + :target: https://pepy.tech/project/tqdm +.. |Py-Versions| image:: https://img.shields.io/pypi/pyversions/tqdm.svg?logo=python&logoColor=white + :target: https://pypi.org/project/tqdm +.. |Conda-Forge-Status| image:: https://img.shields.io/conda/v/conda-forge/tqdm.svg?label=conda-forge&logo=conda-forge + :target: https://anaconda.org/conda-forge/tqdm +.. |Snapcraft| image:: https://img.shields.io/badge/snap-install-82BEA0.svg?logo=snapcraft + :target: https://snapcraft.io/tqdm +.. |Docker| image:: https://img.shields.io/badge/docker-pull-blue.svg?logo=docker&logoColor=white + :target: https://hub.docker.com/r/tqdm/tqdm +.. |Libraries-Rank| image:: https://img.shields.io/librariesio/sourcerank/pypi/tqdm.svg?logo=koding&logoColor=white + :target: https://libraries.io/pypi/tqdm +.. |Libraries-Dependents| image:: https://img.shields.io/librariesio/dependent-repos/pypi/tqdm.svg?logo=koding&logoColor=white + :target: https://github.com/tqdm/tqdm/network/dependents +.. |OpenHub-Status| image:: https://www.openhub.net/p/tqdm/widgets/project_thin_badge?format=gif + :target: https://www.openhub.net/p/tqdm?ref=Thin+badge +.. |awesome-python| image:: https://awesome.re/mentioned-badge.svg + :target: https://github.com/vinta/awesome-python +.. |LICENCE| image:: https://img.shields.io/pypi/l/tqdm.svg + :target: https://raw.githubusercontent.com/tqdm/tqdm/master/LICENCE +.. |DOI| image:: https://img.shields.io/badge/DOI-10.5281/zenodo.595120-blue.svg + :target: https://doi.org/10.5281/zenodo.595120 +.. |binder-demo| image:: https://mybinder.org/badge_logo.svg + :target: https://mybinder.org/v2/gh/tqdm/tqdm/master?filepath=DEMO.ipynb +.. |Screenshot-Jupyter1| image:: https://tqdm.github.io/img/jupyter-1.gif +.. |Screenshot-Jupyter2| image:: https://tqdm.github.io/img/jupyter-2.gif +.. |Screenshot-Jupyter3| image:: https://tqdm.github.io/img/jupyter-3.gif +.. 
|README-Hits| image:: https://cgi.cdcl.ml/hits?q=tqdm&style=social&r=https://github.com/tqdm/tqdm&l=https://tqdm.github.io/img/favicon.png&f=https://tqdm.github.io/img/logo.gif + :target: https://cgi.cdcl.ml/hits?q=tqdm&a=plot&r=https://github.com/tqdm/tqdm&l=https://tqdm.github.io/img/favicon.png&f=https://tqdm.github.io/img/logo.gif&style=social diff --git a/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/RECORD b/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/RECORD new file mode 100644 index 00000000..9285971d --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/RECORD @@ -0,0 +1,75 @@ +../../../bin/tqdm,sha256=f-GY6EfRq30TqBaI27jwY4RzpSFL2o9okUTIDNR-zvA,245 +tqdm-4.67.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +tqdm-4.67.1.dist-info/LICENCE,sha256=3DMlLoKQFeOxUAhvubOkD2rW-zLC9GEM6BL6Z301mGo,1985 +tqdm-4.67.1.dist-info/METADATA,sha256=aIoWMt9SWhmP7FLc_vsSRtMerO6cA1qsrC1-r42P9mk,57675 +tqdm-4.67.1.dist-info/RECORD,, +tqdm-4.67.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +tqdm-4.67.1.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91 +tqdm-4.67.1.dist-info/entry_points.txt,sha256=ReJCH7Ui3Zyh6M16E4OhsZ1oU7WtMXCfbtoyBhGO29Y,39 +tqdm-4.67.1.dist-info/top_level.txt,sha256=NLiUJNfmc9At15s7JURiwvqMEjUi9G5PMGRrmMYzNSM,5 +tqdm/__init__.py,sha256=9mQNYSSqP99JasubEC1POJLMmhkkBH6cJZxPIR5G2pQ,1572 +tqdm/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30 +tqdm/__pycache__/__init__.cpython-312.pyc,, +tqdm/__pycache__/__main__.cpython-312.pyc,, +tqdm/__pycache__/_dist_ver.cpython-312.pyc,, +tqdm/__pycache__/_main.cpython-312.pyc,, +tqdm/__pycache__/_monitor.cpython-312.pyc,, +tqdm/__pycache__/_tqdm.cpython-312.pyc,, +tqdm/__pycache__/_tqdm_gui.cpython-312.pyc,, +tqdm/__pycache__/_tqdm_notebook.cpython-312.pyc,, +tqdm/__pycache__/_tqdm_pandas.cpython-312.pyc,, +tqdm/__pycache__/_utils.cpython-312.pyc,, +tqdm/__pycache__/asyncio.cpython-312.pyc,, +tqdm/__pycache__/auto.cpython-312.pyc,, +tqdm/__pycache__/autonotebook.cpython-312.pyc,, +tqdm/__pycache__/cli.cpython-312.pyc,, +tqdm/__pycache__/dask.cpython-312.pyc,, +tqdm/__pycache__/gui.cpython-312.pyc,, +tqdm/__pycache__/keras.cpython-312.pyc,, +tqdm/__pycache__/notebook.cpython-312.pyc,, +tqdm/__pycache__/rich.cpython-312.pyc,, +tqdm/__pycache__/std.cpython-312.pyc,, +tqdm/__pycache__/tk.cpython-312.pyc,, +tqdm/__pycache__/utils.cpython-312.pyc,, +tqdm/__pycache__/version.cpython-312.pyc,, +tqdm/_dist_ver.py,sha256=m5AdYI-jB-v6P0VJ_70isH_p24EzSOGSwVvuAZmkmKY,23 +tqdm/_main.py,sha256=9ySvgmi_2Sw4CAo5UDW0Q2dxfTryboEWGHohfCJz0sA,283 +tqdm/_monitor.py,sha256=Uku-DPWgzJ7dO5CK08xKJK-E_F6qQ-JB3ksuXczSYR0,3699 +tqdm/_tqdm.py,sha256=LfLCuJ6bpsVo9xilmtBXyEm1vGnUCFrliW85j3J-nD4,283 +tqdm/_tqdm_gui.py,sha256=03Hc8KayxJveieI5-0-2NGiDpLvw9jZekofJUV7CCwk,287 +tqdm/_tqdm_notebook.py,sha256=BuHiLuxu6uEfZFaPJW3RPpPaxaVctEQA3kdSJSDL1hw,307 +tqdm/_tqdm_pandas.py,sha256=c9jptUgigN6axRDhRd4Rif98Tmxeopc1nFNFhIpbFUE,888 +tqdm/_utils.py,sha256=_4E73bfDj4f1s3sM42NLHNrZDOkijZoWq-n6xWLkdZ8,553 +tqdm/asyncio.py,sha256=Kp2rSkNRf9KRqa3d9YpgeZQ7L7EZf2Ki4bSc7UPIyoo,2757 +tqdm/auto.py,sha256=nDZflj6p2zKkjBCNBourrhS81zYfZy1_dQvbckrdW8o,871 +tqdm/autonotebook.py,sha256=Yb9F5uaiBPhfbDDFpbtoG8I2YUw3uQJ89rUDLbfR6ws,956 +tqdm/cli.py,sha256=SbKlN8QyZ2ogenqt-wT_p6_sx2OOdCjCyhoZBFnlmyI,11010 +tqdm/completion.sh,sha256=j79KbSmpIj_E11jfTfBXrGnUTzKXVpQ1vGVQvsyDRl4,946 +tqdm/contrib/__init__.py,sha256=OgSwVXm-vlDJ-2imtoQ9z8qdom4snMSRztH72KMA82A,2494 
+tqdm/contrib/__pycache__/__init__.cpython-312.pyc,, +tqdm/contrib/__pycache__/bells.cpython-312.pyc,, +tqdm/contrib/__pycache__/concurrent.cpython-312.pyc,, +tqdm/contrib/__pycache__/discord.cpython-312.pyc,, +tqdm/contrib/__pycache__/itertools.cpython-312.pyc,, +tqdm/contrib/__pycache__/logging.cpython-312.pyc,, +tqdm/contrib/__pycache__/slack.cpython-312.pyc,, +tqdm/contrib/__pycache__/telegram.cpython-312.pyc,, +tqdm/contrib/__pycache__/utils_worker.cpython-312.pyc,, +tqdm/contrib/bells.py,sha256=Yx1HqGCmHrESCAO700j5wE__JCleNODJxedh1ijPLD0,837 +tqdm/contrib/concurrent.py,sha256=K1yjloKS5WRNFyjLRth0DmU5PAnDbF0A-GD27N-J4a8,3986 +tqdm/contrib/discord.py,sha256=MtVIL1s_dxH21G4sL8FBgQ4Wei23ho9Ek5T-AommvNc,5243 +tqdm/contrib/itertools.py,sha256=WdKKQU5eSzsqHu29SN_oH12huYZo0Jihqoi9-nVhwz4,774 +tqdm/contrib/logging.py,sha256=NsYtnKttj2mMrGm58mEdo5a9DP_2vv8pZyrimSuWulA,3760 +tqdm/contrib/slack.py,sha256=eP_Mr5sQonYniHxxQNGue3jk2JkIPmPWFZqIYxnOui0,4007 +tqdm/contrib/telegram.py,sha256=vn_9SATMbbwn2PAbzSDyOX6av3eBB01QBug11P4H-Og,5008 +tqdm/contrib/utils_worker.py,sha256=HJP5Mz1S1xyzEke2JaqJ2sYLHXADYoo2epT5AzQ38eA,1207 +tqdm/dask.py,sha256=9Ei58eVqTossRLhAfWyUFCduXYKjmLmwkaXIy-CHYfs,1319 +tqdm/gui.py,sha256=STIB3K8iDzDgkNUqWIpvcI_u0OGtbGNy5NwpALXhfWs,5479 +tqdm/keras.py,sha256=op9sBkb6q6c6dw2wJ0SD2ZwpPK7yM1Vbg4l1Qiy3MIo,4373 +tqdm/notebook.py,sha256=GtZ3IapLL1v8WNDaTSvPw0bJGTyfp71Vfz5HDnAzx1M,10895 +tqdm/rich.py,sha256=YyMPkEHVyYUVUR3adJKbVX26iTmNKpNMf3DEqmm-m60,5021 +tqdm/std.py,sha256=tWjz6-QCa92aqYjz7PIdkLUCAfiy-lJZheBtZyIIyO0,57461 +tqdm/tk.py,sha256=Gu0uwXwLCGPRGHORdi3WvBLGiseUp_xxX_h_gp9VpK0,6701 +tqdm/tqdm.1,sha256=aILyUPk2S4OPe_uWy2P4AMjUf0oQ6PUW0nLYXB-BWwI,7889 +tqdm/utils.py,sha256=6E0BQw3Sg7uGWKBM_cDn3P42tXswRhzkggbhBgLDjl8,11821 +tqdm/version.py,sha256=-1yWjfu3P0eghVsysHH07fbzdiADNRdzRtYPqOaqR2A,333 diff --git a/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/REQUESTED b/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/WHEEL b/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/WHEEL new file mode 100644 index 00000000..ae527e7d --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (75.6.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/entry_points.txt b/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/entry_points.txt new file mode 100644 index 00000000..540e60f4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +tqdm = tqdm.cli:main diff --git a/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/top_level.txt new file mode 100644 index 00000000..78620c47 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm-4.67.1.dist-info/top_level.txt @@ -0,0 +1 @@ +tqdm diff --git a/venv/lib/python3.12/site-packages/tqdm/__init__.py b/venv/lib/python3.12/site-packages/tqdm/__init__.py new file mode 100644 index 00000000..8081f77b --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/__init__.py @@ -0,0 +1,38 @@ +from ._monitor import TMonitor, TqdmSynchronisationWarning +from ._tqdm_pandas import tqdm_pandas +from .cli import main # TODO: remove in v5.0.0 +from .gui import tqdm as tqdm_gui # TODO: remove in v5.0.0 +from .gui import 
trange as tgrange # TODO: remove in v5.0.0 +from .std import ( + TqdmDeprecationWarning, TqdmExperimentalWarning, TqdmKeyError, TqdmMonitorWarning, + TqdmTypeError, TqdmWarning, tqdm, trange) +from .version import __version__ + +__all__ = ['tqdm', 'tqdm_gui', 'trange', 'tgrange', 'tqdm_pandas', + 'tqdm_notebook', 'tnrange', 'main', 'TMonitor', + 'TqdmTypeError', 'TqdmKeyError', + 'TqdmWarning', 'TqdmDeprecationWarning', + 'TqdmExperimentalWarning', + 'TqdmMonitorWarning', 'TqdmSynchronisationWarning', + '__version__'] + + +def tqdm_notebook(*args, **kwargs): # pragma: no cover + """See tqdm.notebook.tqdm for full documentation""" + from warnings import warn + + from .notebook import tqdm as _tqdm_notebook + warn("This function will be removed in tqdm==5.0.0\n" + "Please use `tqdm.notebook.tqdm` instead of `tqdm.tqdm_notebook`", + TqdmDeprecationWarning, stacklevel=2) + return _tqdm_notebook(*args, **kwargs) + + +def tnrange(*args, **kwargs): # pragma: no cover + """Shortcut for `tqdm.notebook.tqdm(range(*args), **kwargs)`.""" + from warnings import warn + + from .notebook import trange as _tnrange + warn("Please use `tqdm.notebook.trange` instead of `tqdm.tnrange`", + TqdmDeprecationWarning, stacklevel=2) + return _tnrange(*args, **kwargs) diff --git a/venv/lib/python3.12/site-packages/tqdm/__main__.py b/venv/lib/python3.12/site-packages/tqdm/__main__.py new file mode 100644 index 00000000..4e28416e --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/__main__.py @@ -0,0 +1,3 @@ +from .cli import main + +main() diff --git a/venv/lib/python3.12/site-packages/tqdm/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..eb6ca733 Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/__pycache__/__main__.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/__main__.cpython-312.pyc new file mode 100644 index 00000000..7653effb Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/__main__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/__pycache__/_dist_ver.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/_dist_ver.cpython-312.pyc new file mode 100644 index 00000000..2f9f10d3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/_dist_ver.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/__pycache__/_main.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/_main.cpython-312.pyc new file mode 100644 index 00000000..611568e8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/_main.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/__pycache__/_monitor.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/_monitor.cpython-312.pyc new file mode 100644 index 00000000..62019d5e Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/_monitor.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/__pycache__/_tqdm.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/_tqdm.cpython-312.pyc new file mode 100644 index 00000000..73d808b7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/_tqdm.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/tqdm/__pycache__/_tqdm_gui.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/_tqdm_gui.cpython-312.pyc new file mode 100644 index 00000000..788ad8c5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/_tqdm_gui.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/__pycache__/_tqdm_notebook.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/_tqdm_notebook.cpython-312.pyc new file mode 100644 index 00000000..b6d0ce05 Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/_tqdm_notebook.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/__pycache__/_tqdm_pandas.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/_tqdm_pandas.cpython-312.pyc new file mode 100644 index 00000000..8de65b99 Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/_tqdm_pandas.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/__pycache__/_utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/_utils.cpython-312.pyc new file mode 100644 index 00000000..cced8412 Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/_utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/__pycache__/asyncio.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/asyncio.cpython-312.pyc new file mode 100644 index 00000000..bd04ecde Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/asyncio.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/__pycache__/auto.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/auto.cpython-312.pyc new file mode 100644 index 00000000..8ad60fcb Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/auto.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/__pycache__/autonotebook.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/autonotebook.cpython-312.pyc new file mode 100644 index 00000000..26bca0f6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/autonotebook.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/__pycache__/cli.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/cli.cpython-312.pyc new file mode 100644 index 00000000..d36a1967 Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/cli.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/__pycache__/dask.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/dask.cpython-312.pyc new file mode 100644 index 00000000..6ddd70b9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/dask.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/__pycache__/gui.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/gui.cpython-312.pyc new file mode 100644 index 00000000..a7bccb1a Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/gui.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/__pycache__/keras.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/keras.cpython-312.pyc new file mode 100644 index 00000000..e0fd8089 Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/keras.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/tqdm/__pycache__/notebook.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/notebook.cpython-312.pyc new file mode 100644 index 00000000..c6665c20 Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/notebook.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/__pycache__/rich.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/rich.cpython-312.pyc new file mode 100644 index 00000000..bfad8730 Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/rich.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/__pycache__/std.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/std.cpython-312.pyc new file mode 100644 index 00000000..9fc841d8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/std.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/__pycache__/tk.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/tk.cpython-312.pyc new file mode 100644 index 00000000..2f3cea31 Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/tk.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/utils.cpython-312.pyc new file mode 100644 index 00000000..e4a605ac Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/__pycache__/version.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/__pycache__/version.cpython-312.pyc new file mode 100644 index 00000000..db96c913 Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/__pycache__/version.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/_dist_ver.py b/venv/lib/python3.12/site-packages/tqdm/_dist_ver.py new file mode 100644 index 00000000..61af7d5b --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/_dist_ver.py @@ -0,0 +1 @@ +__version__ = '4.67.1' diff --git a/venv/lib/python3.12/site-packages/tqdm/_main.py b/venv/lib/python3.12/site-packages/tqdm/_main.py new file mode 100644 index 00000000..04fdeeff --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/_main.py @@ -0,0 +1,9 @@ +from warnings import warn + +from .cli import * # NOQA +from .cli import __all__ # NOQA +from .std import TqdmDeprecationWarning + +warn("This function will be removed in tqdm==5.0.0\n" + "Please use `tqdm.cli.*` instead of `tqdm._main.*`", + TqdmDeprecationWarning, stacklevel=2) diff --git a/venv/lib/python3.12/site-packages/tqdm/_monitor.py b/venv/lib/python3.12/site-packages/tqdm/_monitor.py new file mode 100644 index 00000000..f71aa568 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/_monitor.py @@ -0,0 +1,95 @@ +import atexit +from threading import Event, Thread, current_thread +from time import time +from warnings import warn + +__all__ = ["TMonitor", "TqdmSynchronisationWarning"] + + +class TqdmSynchronisationWarning(RuntimeWarning): + """tqdm multi-thread/-process errors which may cause incorrect nesting + but otherwise no adverse effects""" + pass + + +class TMonitor(Thread): + """ + Monitoring thread for tqdm bars. + Monitors if tqdm bars are taking too much time to display + and readjusts miniters automatically if necessary. 
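To make the monitor's job concrete, a small usage sketch (illustrative aside, not part of this changeset; behaviour as described in the docstring above): when a fast loop suddenly slows down, the monitor thread notices that `maxinterval` has been exceeded and resets `miniters`, so the bar keeps refreshing.

    from time import sleep
    from tqdm import tqdm

    # The fast phase drives the dynamic miniters up; in the slow phase the
    # monitor (waking every tqdm.monitor_interval seconds) detects that
    # last_print_t exceeds maxinterval and forces miniters back to 1.
    for i in tqdm(range(200), mininterval=0.1, maxinterval=2):
        sleep(0.001 if i < 100 else 1)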
+ + Parameters + ---------- + tqdm_cls : class + tqdm class to use (can be core tqdm or a submodule). + sleep_interval : float + Time to sleep between monitoring checks. + """ + _test = {} # internal vars for unit testing + + def __init__(self, tqdm_cls, sleep_interval): + Thread.__init__(self) + self.daemon = True # kill thread when main killed (KeyboardInterrupt) + self.woken = 0 # last time woken up, to sync with monitor + self.tqdm_cls = tqdm_cls + self.sleep_interval = sleep_interval + self._time = self._test.get("time", time) + self.was_killed = self._test.get("Event", Event)() + atexit.register(self.exit) + self.start() + + def exit(self): + self.was_killed.set() + if self is not current_thread(): + self.join() + return self.report() + + def get_instances(self): + # returns a copy of started `tqdm_cls` instances + return [i for i in self.tqdm_cls._instances.copy() + # Avoid race by checking that the instance started + if hasattr(i, 'start_t')] + + def run(self): + cur_t = self._time() + while True: + # After processing and before sleeping, notify that we woke + # Need to be done just before sleeping + self.woken = cur_t + # Sleep some time... + self.was_killed.wait(self.sleep_interval) + # Quit if killed + if self.was_killed.is_set(): + return + # Then monitor! + # Acquire lock (to access _instances) + with self.tqdm_cls.get_lock(): + cur_t = self._time() + # Check tqdm instances are waiting too long to print + instances = self.get_instances() + for instance in instances: + # Check event in loop to reduce blocking time on exit + if self.was_killed.is_set(): + return + # Only if mininterval > 1 (else iterations are just slow) + # and last refresh exceeded maxinterval + if ( + instance.miniters > 1 + and (cur_t - instance.last_print_t) >= instance.maxinterval + ): + # force bypassing miniters on next iteration + # (dynamic_miniters adjusts mininterval automatically) + instance.miniters = 1 + # Refresh now! 
(works only for manual tqdm) + instance.refresh(nolock=True) + # Remove accidental long-lived strong reference + del instance + if instances != self.get_instances(): # pragma: nocover + warn("Set changed size during iteration" + + " (see https://github.com/tqdm/tqdm/issues/481)", + TqdmSynchronisationWarning, stacklevel=2) + # Remove accidental long-lived strong references + del instances + + def report(self): + return not self.was_killed.is_set() diff --git a/venv/lib/python3.12/site-packages/tqdm/_tqdm.py b/venv/lib/python3.12/site-packages/tqdm/_tqdm.py new file mode 100644 index 00000000..7fc49627 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/_tqdm.py @@ -0,0 +1,9 @@ +from warnings import warn + +from .std import * # NOQA +from .std import __all__ # NOQA +from .std import TqdmDeprecationWarning + +warn("This function will be removed in tqdm==5.0.0\n" + "Please use `tqdm.std.*` instead of `tqdm._tqdm.*`", + TqdmDeprecationWarning, stacklevel=2) diff --git a/venv/lib/python3.12/site-packages/tqdm/_tqdm_gui.py b/venv/lib/python3.12/site-packages/tqdm/_tqdm_gui.py new file mode 100644 index 00000000..f32aa894 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/_tqdm_gui.py @@ -0,0 +1,9 @@ +from warnings import warn + +from .gui import * # NOQA +from .gui import __all__ # NOQA +from .std import TqdmDeprecationWarning + +warn("This function will be removed in tqdm==5.0.0\n" + "Please use `tqdm.gui.*` instead of `tqdm._tqdm_gui.*`", + TqdmDeprecationWarning, stacklevel=2) diff --git a/venv/lib/python3.12/site-packages/tqdm/_tqdm_notebook.py b/venv/lib/python3.12/site-packages/tqdm/_tqdm_notebook.py new file mode 100644 index 00000000..f225fbf5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/_tqdm_notebook.py @@ -0,0 +1,9 @@ +from warnings import warn + +from .notebook import * # NOQA +from .notebook import __all__ # NOQA +from .std import TqdmDeprecationWarning + +warn("This function will be removed in tqdm==5.0.0\n" + "Please use `tqdm.notebook.*` instead of `tqdm._tqdm_notebook.*`", + TqdmDeprecationWarning, stacklevel=2) diff --git a/venv/lib/python3.12/site-packages/tqdm/_tqdm_pandas.py b/venv/lib/python3.12/site-packages/tqdm/_tqdm_pandas.py new file mode 100644 index 00000000..c4fe6efd --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/_tqdm_pandas.py @@ -0,0 +1,24 @@ +import sys + +__author__ = "github.com/casperdcl" +__all__ = ['tqdm_pandas'] + + +def tqdm_pandas(tclass, **tqdm_kwargs): + """ + Registers the given `tqdm` instance with + `pandas.core.groupby.DataFrameGroupBy.progress_apply`. 
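As the deprecation warnings in the body below suggest, the supported replacement is `tqdm.pandas()`; a minimal sketch (illustrative aside, not part of this changeset; assumes pandas is installed):

    import pandas as pd
    from tqdm import tqdm

    tqdm.pandas(desc="apply")  # registers DataFrame.progress_apply and friends
    df = pd.DataFrame({"x": range(10_000)})
    df["y"] = df["x"].progress_apply(lambda v: v * 2)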
+ """ + from tqdm import TqdmDeprecationWarning + + if isinstance(tclass, type) or (getattr(tclass, '__name__', '').startswith( + 'tqdm_')): # delayed adapter case + TqdmDeprecationWarning( + "Please use `tqdm.pandas(...)` instead of `tqdm_pandas(tqdm, ...)`.", + fp_write=getattr(tqdm_kwargs.get('file', None), 'write', sys.stderr.write)) + tclass.pandas(**tqdm_kwargs) + else: + TqdmDeprecationWarning( + "Please use `tqdm.pandas(...)` instead of `tqdm_pandas(tqdm(...))`.", + fp_write=getattr(tclass.fp, 'write', sys.stderr.write)) + type(tclass).pandas(deprecated_t=tclass) diff --git a/venv/lib/python3.12/site-packages/tqdm/_utils.py b/venv/lib/python3.12/site-packages/tqdm/_utils.py new file mode 100644 index 00000000..385e849e --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/_utils.py @@ -0,0 +1,11 @@ +from warnings import warn + +from .std import TqdmDeprecationWarning +from .utils import ( # NOQA, pylint: disable=unused-import + CUR_OS, IS_NIX, IS_WIN, RE_ANSI, Comparable, FormatReplace, SimpleTextIOWrapper, + _environ_cols_wrapper, _is_ascii, _is_utf, _screen_shape_linux, _screen_shape_tput, + _screen_shape_windows, _screen_shape_wrapper, _supports_unicode, _term_move_up, colorama) + +warn("This function will be removed in tqdm==5.0.0\n" + "Please use `tqdm.utils.*` instead of `tqdm._utils.*`", + TqdmDeprecationWarning, stacklevel=2) diff --git a/venv/lib/python3.12/site-packages/tqdm/asyncio.py b/venv/lib/python3.12/site-packages/tqdm/asyncio.py new file mode 100644 index 00000000..2d00a0a2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/asyncio.py @@ -0,0 +1,93 @@ +""" +Asynchronous progressbar decorator for iterators. +Includes a default `range` iterator printing to `stderr`. + +Usage: +>>> from tqdm.asyncio import trange, tqdm +>>> async for i in trange(10): +... ... +""" +import asyncio +from sys import version_info + +from .std import tqdm as std_tqdm + +__author__ = {"github.com/": ["casperdcl"]} +__all__ = ['tqdm_asyncio', 'tarange', 'tqdm', 'trange'] + + +class tqdm_asyncio(std_tqdm): + """ + Asynchronous-friendly version of tqdm. + """ + def __init__(self, iterable=None, *args, **kwargs): + super().__init__(iterable, *args, **kwargs) + self.iterable_awaitable = False + if iterable is not None: + if hasattr(iterable, "__anext__"): + self.iterable_next = iterable.__anext__ + self.iterable_awaitable = True + elif hasattr(iterable, "__next__"): + self.iterable_next = iterable.__next__ + else: + self.iterable_iterator = iter(iterable) + self.iterable_next = self.iterable_iterator.__next__ + + def __aiter__(self): + return self + + async def __anext__(self): + try: + if self.iterable_awaitable: + res = await self.iterable_next() + else: + res = self.iterable_next() + self.update() + return res + except StopIteration: + self.close() + raise StopAsyncIteration + except BaseException: + self.close() + raise + + def send(self, *args, **kwargs): + return self.iterable.send(*args, **kwargs) + + @classmethod + def as_completed(cls, fs, *, loop=None, timeout=None, total=None, **tqdm_kwargs): + """ + Wrapper for `asyncio.as_completed`. + """ + if total is None: + total = len(fs) + kwargs = {} + if version_info[:2] < (3, 10): + kwargs['loop'] = loop + yield from cls(asyncio.as_completed(fs, timeout=timeout, **kwargs), + total=total, **tqdm_kwargs) + + @classmethod + async def gather(cls, *fs, loop=None, timeout=None, total=None, **tqdm_kwargs): + """ + Wrapper for `asyncio.gather`. 
+ """ + async def wrap_awaitable(i, f): + return i, await f + + ifs = [wrap_awaitable(i, f) for i, f in enumerate(fs)] + res = [await f for f in cls.as_completed(ifs, loop=loop, timeout=timeout, + total=total, **tqdm_kwargs)] + return [i for _, i in sorted(res)] + + +def tarange(*args, **kwargs): + """ + A shortcut for `tqdm.asyncio.tqdm(range(*args), **kwargs)`. + """ + return tqdm_asyncio(range(*args), **kwargs) + + +# Aliases +tqdm = tqdm_asyncio +trange = tarange diff --git a/venv/lib/python3.12/site-packages/tqdm/auto.py b/venv/lib/python3.12/site-packages/tqdm/auto.py new file mode 100644 index 00000000..206c4409 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/auto.py @@ -0,0 +1,40 @@ +""" +Enables multiple commonly used features. + +Method resolution order: + +- `tqdm.autonotebook` without import warnings +- `tqdm.asyncio` +- `tqdm.std` base class + +Usage: +>>> from tqdm.auto import trange, tqdm +>>> for i in trange(10): +... ... +""" +import warnings + +from .std import TqdmExperimentalWarning + +with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=TqdmExperimentalWarning) + from .autonotebook import tqdm as notebook_tqdm + +from .asyncio import tqdm as asyncio_tqdm +from .std import tqdm as std_tqdm + +if notebook_tqdm != std_tqdm: + class tqdm(notebook_tqdm, asyncio_tqdm): # pylint: disable=inconsistent-mro + pass +else: + tqdm = asyncio_tqdm + + +def trange(*args, **kwargs): + """ + A shortcut for `tqdm.auto.tqdm(range(*args), **kwargs)`. + """ + return tqdm(range(*args), **kwargs) + + +__all__ = ["tqdm", "trange"] diff --git a/venv/lib/python3.12/site-packages/tqdm/autonotebook.py b/venv/lib/python3.12/site-packages/tqdm/autonotebook.py new file mode 100644 index 00000000..a09f2ec4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/autonotebook.py @@ -0,0 +1,29 @@ +""" +Automatically choose between `tqdm.notebook` and `tqdm.std`. + +Usage: +>>> from tqdm.autonotebook import trange, tqdm +>>> for i in trange(10): +... ... +""" +import sys +from warnings import warn + +try: + get_ipython = sys.modules['IPython'].get_ipython + if 'IPKernelApp' not in get_ipython().config: # pragma: no cover + raise ImportError("console") + from .notebook import WARN_NOIPYW, IProgress + if IProgress is None: + from .std import TqdmWarning + warn(WARN_NOIPYW, TqdmWarning, stacklevel=2) + raise ImportError('ipywidgets') +except Exception: + from .std import tqdm, trange +else: # pragma: no cover + from .notebook import tqdm, trange + from .std import TqdmExperimentalWarning + warn("Using `tqdm.autonotebook.tqdm` in notebook mode." + " Use `tqdm.tqdm` instead to force console mode" + " (e.g. in jupyter console)", TqdmExperimentalWarning, stacklevel=2) +__all__ = ["tqdm", "trange"] diff --git a/venv/lib/python3.12/site-packages/tqdm/cli.py b/venv/lib/python3.12/site-packages/tqdm/cli.py new file mode 100644 index 00000000..e54a7fc8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/cli.py @@ -0,0 +1,324 @@ +""" +Module version for monitoring CLI pipes (`... | python -m tqdm | ...`). 
+""" +import logging +import re +import sys +from ast import literal_eval as numeric +from textwrap import indent + +from .std import TqdmKeyError, TqdmTypeError, tqdm +from .version import __version__ + +__all__ = ["main"] +log = logging.getLogger(__name__) + + +def cast(val, typ): + log.debug((val, typ)) + if " or " in typ: + for t in typ.split(" or "): + try: + return cast(val, t) + except TqdmTypeError: + pass + raise TqdmTypeError(f"{val} : {typ}") + + # sys.stderr.write('\ndebug | `val:type`: `' + val + ':' + typ + '`.\n') + if typ == 'bool': + if (val == 'True') or (val == ''): + return True + if val == 'False': + return False + raise TqdmTypeError(val + ' : ' + typ) + if typ == 'chr': + if len(val) == 1: + return val.encode() + if re.match(r"^\\\w+$", val): + return eval(f'"{val}"').encode() + raise TqdmTypeError(f"{val} : {typ}") + if typ == 'str': + return val + if typ == 'int': + try: + return int(val) + except ValueError as exc: + raise TqdmTypeError(f"{val} : {typ}") from exc + if typ == 'float': + try: + return float(val) + except ValueError as exc: + raise TqdmTypeError(f"{val} : {typ}") from exc + raise TqdmTypeError(f"{val} : {typ}") + + +def posix_pipe(fin, fout, delim=b'\\n', buf_size=256, + callback=lambda float: None, callback_len=True): + """ + Params + ------ + fin : binary file with `read(buf_size : int)` method + fout : binary file with `write` (and optionally `flush`) methods. + callback : function(float), e.g.: `tqdm.update` + callback_len : If (default: True) do `callback(len(buffer))`. + Otherwise, do `callback(data) for data in buffer.split(delim)`. + """ + fp_write = fout.write + + if not delim: + while True: + tmp = fin.read(buf_size) + + # flush at EOF + if not tmp: + getattr(fout, 'flush', lambda: None)() + return + + fp_write(tmp) + callback(len(tmp)) + # return + + buf = b'' + len_delim = len(delim) + # n = 0 + while True: + tmp = fin.read(buf_size) + + # flush at EOF + if not tmp: + if buf: + fp_write(buf) + if callback_len: + # n += 1 + buf.count(delim) + callback(1 + buf.count(delim)) + else: + for i in buf.split(delim): + callback(i) + getattr(fout, 'flush', lambda: None)() + return # n + + while True: + i = tmp.find(delim) + if i < 0: + buf += tmp + break + fp_write(buf + tmp[:i + len(delim)]) + # n += 1 + callback(1 if callback_len else (buf + tmp[:i])) + buf = b'' + tmp = tmp[i + len_delim:] + + +# ((opt, type), ... ) +RE_OPTS = re.compile(r'\n {4}(\S+)\s{2,}:\s*([^,]+)') +# better split method assuming no positional args +RE_SHLEX = re.compile(r'\s*(? : \2', d) + split = RE_OPTS.split(d) + opt_types_desc = zip(split[1::3], split[2::3], split[3::3]) + d = ''.join(('\n --{0} : {2}{3}' if otd[1] == 'bool' else + '\n --{0}=<{1}> : {2}{3}').format( + otd[0].replace('_', '-'), otd[0], *otd[1:]) + for otd in opt_types_desc if otd[0] not in UNSUPPORTED_OPTS) + + help_short = "Usage:\n tqdm [--help | options]\n" + d = help_short + """ +Options: + -h, --help Print this help and exit. + -v, --version Print version and exit. 
+""" + d.strip('\n') + '\n' + + # opts = docopt(d, version=__version__) + if any(v in argv for v in ('-v', '--version')): + sys.stdout.write(__version__ + '\n') + sys.exit(0) + elif any(v in argv for v in ('-h', '--help')): + sys.stdout.write(d + '\n') + sys.exit(0) + elif argv and argv[0][:2] != '--': + sys.stderr.write(f"Error:Unknown argument:{argv[0]}\n{help_short}") + + argv = RE_SHLEX.split(' '.join(["tqdm"] + argv)) + opts = dict(zip(argv[1::3], argv[3::3])) + + log.debug(opts) + opts.pop('log', True) + + tqdm_args = {'file': fp} + try: + for (o, v) in opts.items(): + o = o.replace('-', '_') + try: + tqdm_args[o] = cast(v, opt_types[o]) + except KeyError as e: + raise TqdmKeyError(str(e)) + log.debug('args:' + str(tqdm_args)) + + delim_per_char = tqdm_args.pop('bytes', False) + update = tqdm_args.pop('update', False) + update_to = tqdm_args.pop('update_to', False) + if sum((delim_per_char, update, update_to)) > 1: + raise TqdmKeyError("Can only have one of --bytes --update --update_to") + except Exception: + fp.write("\nError:\n" + help_short) + stdin, stdout_write = sys.stdin, sys.stdout.write + for i in stdin: + stdout_write(i) + raise + else: + buf_size = tqdm_args.pop('buf_size', 256) + delim = tqdm_args.pop('delim', b'\\n') + tee = tqdm_args.pop('tee', False) + manpath = tqdm_args.pop('manpath', None) + comppath = tqdm_args.pop('comppath', None) + if tqdm_args.pop('null', False): + class stdout(object): + @staticmethod + def write(_): + pass + else: + stdout = sys.stdout + stdout = getattr(stdout, 'buffer', stdout) + stdin = getattr(sys.stdin, 'buffer', sys.stdin) + if manpath or comppath: + try: # py<3.9 + import importlib_resources as resources + except ImportError: + from importlib import resources + from pathlib import Path + + def cp(name, dst): + """copy resource `name` to `dst`""" + fi = resources.files('tqdm') / name + dst.write_bytes(fi.read_bytes()) + log.info("written:%s", dst) + if manpath is not None: + cp('tqdm.1', Path(manpath) / 'tqdm.1') + if comppath is not None: + cp('completion.sh', Path(comppath) / 'tqdm_completion.sh') + sys.exit(0) + if tee: + stdout_write = stdout.write + fp_write = getattr(fp, 'buffer', fp).write + + class stdout(object): # pylint: disable=function-redefined + @staticmethod + def write(x): + with tqdm.external_write_mode(file=fp): + fp_write(x) + stdout_write(x) + if delim_per_char: + tqdm_args.setdefault('unit', 'B') + tqdm_args.setdefault('unit_scale', True) + tqdm_args.setdefault('unit_divisor', 1024) + log.debug(tqdm_args) + with tqdm(**tqdm_args) as t: + posix_pipe(stdin, stdout, '', buf_size, t.update) + elif delim == b'\\n': + log.debug(tqdm_args) + write = stdout.write + if update or update_to: + with tqdm(**tqdm_args) as t: + if update: + def callback(i): + t.update(numeric(i.decode())) + else: # update_to + def callback(i): + t.update(numeric(i.decode()) - t.n) + for i in stdin: + write(i) + callback(i) + else: + for i in tqdm(stdin, **tqdm_args): + write(i) + else: + log.debug(tqdm_args) + with tqdm(**tqdm_args) as t: + callback_len = False + if update: + def callback(i): + t.update(numeric(i.decode())) + elif update_to: + def callback(i): + t.update(numeric(i.decode()) - t.n) + else: + callback = t.update + callback_len = True + posix_pipe(stdin, stdout, delim, buf_size, callback, callback_len) diff --git a/venv/lib/python3.12/site-packages/tqdm/completion.sh b/venv/lib/python3.12/site-packages/tqdm/completion.sh new file mode 100755 index 00000000..9f61c7f1 --- /dev/null +++ 
b/venv/lib/python3.12/site-packages/tqdm/completion.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash +_tqdm(){ + local cur prv + cur="${COMP_WORDS[COMP_CWORD]}" + prv="${COMP_WORDS[COMP_CWORD - 1]}" + + case ${prv} in + --bar_format|--buf_size|--colour|--comppath|--delay|--delim|--desc|--initial|--lock_args|--manpath|--maxinterval|--mininterval|--miniters|--ncols|--nrows|--position|--postfix|--smoothing|--total|--unit|--unit_divisor) + # await user input + ;; + "--log") + COMPREPLY=($(compgen -W 'CRITICAL FATAL ERROR WARN WARNING INFO DEBUG NOTSET' -- ${cur})) + ;; + *) + COMPREPLY=($(compgen -W '--ascii --bar_format --buf_size --bytes --colour --comppath --delay --delim --desc --disable --dynamic_ncols --help --initial --leave --lock_args --log --manpath --maxinterval --mininterval --miniters --ncols --nrows --null --position --postfix --smoothing --tee --total --unit --unit_divisor --unit_scale --update --update_to --version --write_bytes -h -v' -- ${cur})) + ;; + esac +} +complete -F _tqdm tqdm diff --git a/venv/lib/python3.12/site-packages/tqdm/contrib/__init__.py b/venv/lib/python3.12/site-packages/tqdm/contrib/__init__.py new file mode 100644 index 00000000..d059461f --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/contrib/__init__.py @@ -0,0 +1,92 @@ +""" +Thin wrappers around common functions. + +Subpackages contain potentially unstable extensions. +""" +from warnings import warn + +from ..auto import tqdm as tqdm_auto +from ..std import TqdmDeprecationWarning, tqdm +from ..utils import ObjectWrapper + +__author__ = {"github.com/": ["casperdcl"]} +__all__ = ['tenumerate', 'tzip', 'tmap'] + + +class DummyTqdmFile(ObjectWrapper): + """Dummy file-like that will write to tqdm""" + + def __init__(self, wrapped): + super().__init__(wrapped) + self._buf = [] + + def write(self, x, nolock=False): + nl = b"\n" if isinstance(x, bytes) else "\n" + pre, sep, post = x.rpartition(nl) + if sep: + blank = type(nl)() + tqdm.write(blank.join(self._buf + [pre, sep]), + end=blank, file=self._wrapped, nolock=nolock) + self._buf = [post] + else: + self._buf.append(x) + + def __del__(self): + if self._buf: + blank = type(self._buf[0])() + try: + tqdm.write(blank.join(self._buf), end=blank, file=self._wrapped) + except (OSError, ValueError): + pass + + +def builtin_iterable(func): + """Returns `func`""" + warn("This function has no effect, and will be removed in tqdm==5.0.0", + TqdmDeprecationWarning, stacklevel=2) + return func + + +def tenumerate(iterable, start=0, total=None, tqdm_class=tqdm_auto, **tqdm_kwargs): + """ + Equivalent of `numpy.ndenumerate` or builtin `enumerate`. + + Parameters + ---------- + tqdm_class : [default: tqdm.auto.tqdm]. + """ + try: + import numpy as np + except ImportError: + pass + else: + if isinstance(iterable, np.ndarray): + return tqdm_class(np.ndenumerate(iterable), total=total or iterable.size, + **tqdm_kwargs) + return enumerate(tqdm_class(iterable, total=total, **tqdm_kwargs), start) + + +def tzip(iter1, *iter2plus, **tqdm_kwargs): + """ + Equivalent of builtin `zip`. + + Parameters + ---------- + tqdm_class : [default: tqdm.auto.tqdm]. + """ + kwargs = tqdm_kwargs.copy() + tqdm_class = kwargs.pop("tqdm_class", tqdm_auto) + for i in zip(tqdm_class(iter1, **kwargs), *iter2plus): + yield i + + +def tmap(function, *sequences, **tqdm_kwargs): + """ + Equivalent of builtin `map`. + + Parameters + ---------- + tqdm_class : [default: tqdm.auto.tqdm]. 
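A minimal sketch (illustrative aside, not part of this changeset) of the three wrappers defined in this module:

    from tqdm.contrib import tenumerate, tmap, tzip

    words = ["alpha", "beta", "gamma"]
    for i, w in tenumerate(words, desc="enumerate"):
        pass
    assert list(tmap(len, words)) == [5, 4, 5]
    for w, n in tzip(words, [1, 2, 3]):
        pass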
+ """ + for i in tzip(*sequences, **tqdm_kwargs): + yield function(*i) diff --git a/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..0ebe118a Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/bells.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/bells.cpython-312.pyc new file mode 100644 index 00000000..3b7e7e0e Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/bells.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/concurrent.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/concurrent.cpython-312.pyc new file mode 100644 index 00000000..379344db Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/concurrent.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/discord.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/discord.cpython-312.pyc new file mode 100644 index 00000000..c4d7e398 Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/discord.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/itertools.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/itertools.cpython-312.pyc new file mode 100644 index 00000000..ce753bc3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/itertools.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/logging.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/logging.cpython-312.pyc new file mode 100644 index 00000000..4820b912 Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/logging.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/slack.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/slack.cpython-312.pyc new file mode 100644 index 00000000..a76b0bcc Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/slack.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/telegram.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/telegram.cpython-312.pyc new file mode 100644 index 00000000..8471a04b Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/telegram.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/utils_worker.cpython-312.pyc b/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/utils_worker.cpython-312.pyc new file mode 100644 index 00000000..7fe226d5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/tqdm/contrib/__pycache__/utils_worker.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/tqdm/contrib/bells.py b/venv/lib/python3.12/site-packages/tqdm/contrib/bells.py new file mode 100644 index 00000000..5b8f4b9e --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/contrib/bells.py @@ -0,0 +1,26 @@ +""" +Even more features than `tqdm.auto` (all the bells & whistles): + +- 
`tqdm.auto` +- `tqdm.tqdm.pandas` +- `tqdm.contrib.telegram` + + uses `${TQDM_TELEGRAM_TOKEN}` and `${TQDM_TELEGRAM_CHAT_ID}` +- `tqdm.contrib.discord` + + uses `${TQDM_DISCORD_TOKEN}` and `${TQDM_DISCORD_CHANNEL_ID}` +""" +__all__ = ['tqdm', 'trange'] +import warnings +from os import getenv + +if getenv("TQDM_SLACK_TOKEN") and getenv("TQDM_SLACK_CHANNEL"): + from .slack import tqdm, trange +elif getenv("TQDM_TELEGRAM_TOKEN") and getenv("TQDM_TELEGRAM_CHAT_ID"): + from .telegram import tqdm, trange +elif getenv("TQDM_DISCORD_TOKEN") and getenv("TQDM_DISCORD_CHANNEL_ID"): + from .discord import tqdm, trange +else: + from ..auto import tqdm, trange + +with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=FutureWarning) + tqdm.pandas() diff --git a/venv/lib/python3.12/site-packages/tqdm/contrib/concurrent.py b/venv/lib/python3.12/site-packages/tqdm/contrib/concurrent.py new file mode 100644 index 00000000..cd81d622 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/contrib/concurrent.py @@ -0,0 +1,105 @@ +""" +Thin wrappers around `concurrent.futures`. +""" +from contextlib import contextmanager +from operator import length_hint +from os import cpu_count + +from ..auto import tqdm as tqdm_auto +from ..std import TqdmWarning + +__author__ = {"github.com/": ["casperdcl"]} +__all__ = ['thread_map', 'process_map'] + + +@contextmanager +def ensure_lock(tqdm_class, lock_name=""): + """get (create if necessary) and then restore `tqdm_class`'s lock""" + old_lock = getattr(tqdm_class, '_lock', None) # don't create a new lock + lock = old_lock or tqdm_class.get_lock() # maybe create a new lock + lock = getattr(lock, lock_name, lock) # maybe subtype + tqdm_class.set_lock(lock) + yield lock + if old_lock is None: + del tqdm_class._lock + else: + tqdm_class.set_lock(old_lock) + + +def _executor_map(PoolExecutor, fn, *iterables, **tqdm_kwargs): + """ + Implementation of `thread_map` and `process_map`. + + Parameters + ---------- + tqdm_class : [default: tqdm.auto.tqdm]. + max_workers : [default: min(32, cpu_count() + 4)]. + chunksize : [default: 1]. + lock_name : [default: "":str]. + """ + kwargs = tqdm_kwargs.copy() + if "total" not in kwargs: + kwargs["total"] = length_hint(iterables[0]) + tqdm_class = kwargs.pop("tqdm_class", tqdm_auto) + max_workers = kwargs.pop("max_workers", min(32, cpu_count() + 4)) + chunksize = kwargs.pop("chunksize", 1) + lock_name = kwargs.pop("lock_name", "") + with ensure_lock(tqdm_class, lock_name=lock_name) as lk: + # share lock in case workers are already using `tqdm` + with PoolExecutor(max_workers=max_workers, initializer=tqdm_class.set_lock, + initargs=(lk,)) as ex: + return list(tqdm_class(ex.map(fn, *iterables, chunksize=chunksize), **kwargs)) + + +def thread_map(fn, *iterables, **tqdm_kwargs): + """ + Equivalent of `list(map(fn, *iterables))` + driven by `concurrent.futures.ThreadPoolExecutor`. + + Parameters + ---------- + tqdm_class : optional + `tqdm` class to use for bars [default: tqdm.auto.tqdm]. + max_workers : int, optional + Maximum number of workers to spawn; passed to + `concurrent.futures.ThreadPoolExecutor.__init__`. + [default: max(32, cpu_count() + 4)]. + """ + from concurrent.futures import ThreadPoolExecutor + return _executor_map(ThreadPoolExecutor, fn, *iterables, **tqdm_kwargs) + + +def process_map(fn, *iterables, **tqdm_kwargs): + """ + Equivalent of `list(map(fn, *iterables))` + driven by `concurrent.futures.ProcessPoolExecutor`. 
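A usage sketch of `thread_map` above (illustrative aside, not part of this changeset): a progress-bar-wrapped `list(map(...))` driven by a thread pool.

    from tqdm.contrib.concurrent import thread_map

    def crunch(x):
        return x * x  # stand-in for an I/O-bound task

    results = thread_map(crunch, range(100), max_workers=8, desc="crunch")
    assert results == [x * x for x in range(100)]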
+ + Parameters + ---------- + tqdm_class : optional + `tqdm` class to use for bars [default: tqdm.auto.tqdm]. + max_workers : int, optional + Maximum number of workers to spawn; passed to + `concurrent.futures.ProcessPoolExecutor.__init__`. + [default: min(32, cpu_count() + 4)]. + chunksize : int, optional + Size of chunks sent to worker processes; passed to + `concurrent.futures.ProcessPoolExecutor.map`. [default: 1]. + lock_name : str, optional + Member of `tqdm_class.get_lock()` to use [default: mp_lock]. + """ + from concurrent.futures import ProcessPoolExecutor + if iterables and "chunksize" not in tqdm_kwargs: + # default `chunksize=1` has poor performance for large iterables + # (most time spent dispatching items to workers). + longest_iterable_len = max(map(length_hint, iterables)) + if longest_iterable_len > 1000: + from warnings import warn + warn("Iterable length %d > 1000 but `chunksize` is not set." + " This may seriously degrade multiprocess performance." + " Set `chunksize=1` or more." % longest_iterable_len, + TqdmWarning, stacklevel=2) + if "lock_name" not in tqdm_kwargs: + tqdm_kwargs = tqdm_kwargs.copy() + tqdm_kwargs["lock_name"] = "mp_lock" + return _executor_map(ProcessPoolExecutor, fn, *iterables, **tqdm_kwargs) diff --git a/venv/lib/python3.12/site-packages/tqdm/contrib/discord.py b/venv/lib/python3.12/site-packages/tqdm/contrib/discord.py new file mode 100644 index 00000000..574baa84 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/contrib/discord.py @@ -0,0 +1,156 @@ +""" +Sends updates to a Discord bot. + +Usage: +>>> from tqdm.contrib.discord import tqdm, trange +>>> for i in trange(10, token='{token}', channel_id='{channel_id}'): +... ... + +![screenshot](https://tqdm.github.io/img/screenshot-discord.png) +""" +from os import getenv +from warnings import warn + +from requests import Session +from requests.utils import default_user_agent + +from ..auto import tqdm as tqdm_auto +from ..std import TqdmWarning +from ..version import __version__ +from .utils_worker import MonoWorker + +__author__ = {"github.com/": ["casperdcl", "guigoruiz1"]} +__all__ = ['DiscordIO', 'tqdm_discord', 'tdrange', 'tqdm', 'trange'] + + +class DiscordIO(MonoWorker): + """Non-blocking file-like IO using a Discord Bot.""" + API = "https://discord.com/api/v10" + UA = f"tqdm (https://tqdm.github.io, {__version__}) {default_user_agent()}" + + def __init__(self, token, channel_id): + """Creates a new message in the given `channel_id`.""" + super().__init__() + self.token = token + self.channel_id = channel_id + self.session = Session() + self.text = self.__class__.__name__ + self.message_id + + @property + def message_id(self): + if hasattr(self, '_message_id'): + return self._message_id + try: + res = self.session.post( + f'{self.API}/channels/{self.channel_id}/messages', + headers={'Authorization': f'Bot {self.token}', 'User-Agent': self.UA}, + json={'content': f"`{self.text}`"}).json() + except Exception as e: + tqdm_auto.write(str(e)) + else: + if res.get('error_code') == 429: + warn("Creation rate limit: try increasing `mininterval`.", + TqdmWarning, stacklevel=2) + else: + self._message_id = res['id'] + return self._message_id + + def write(self, s): + """Replaces internal `message_id`'s text with `s`.""" + if not s: + s = "..." 
+        s = s.replace('\r', '').strip()
+        if s == self.text:
+            return  # avoid duplicate message Bot error
+        message_id = self.message_id
+        if message_id is None:
+            return
+        self.text = s
+        try:
+            future = self.submit(
+                self.session.patch,
+                f'{self.API}/channels/{self.channel_id}/messages/{message_id}',
+                headers={'Authorization': f'Bot {self.token}', 'User-Agent': self.UA},
+                json={'content': f"`{self.text}`"})
+        except Exception as e:
+            tqdm_auto.write(str(e))
+        else:
+            return future
+
+    def delete(self):
+        """Deletes internal `message_id`."""
+        try:
+            future = self.submit(
+                self.session.delete,
+                f'{self.API}/channels/{self.channel_id}/messages/{self.message_id}',
+                headers={'Authorization': f'Bot {self.token}', 'User-Agent': self.UA})
+        except Exception as e:
+            tqdm_auto.write(str(e))
+        else:
+            return future
+
+
+class tqdm_discord(tqdm_auto):
+    """
+    Standard `tqdm.auto.tqdm` but also sends updates to a Discord Bot.
+    May take a few seconds to create (`__init__`).
+
+    - create a discord bot (not public, no requirement of OAuth2 code
+      grant, only send message permissions) & invite it to a channel:
+
+    - copy the bot `{token}` & `{channel_id}` and paste below
+
+    >>> from tqdm.contrib.discord import tqdm, trange
+    >>> for i in tqdm(iterable, token='{token}', channel_id='{channel_id}'):
+    ...     ...
+    """
+    def __init__(self, *args, **kwargs):
+        """
+        Parameters
+        ----------
+        token  : str, required. Discord bot token
+            [default: ${TQDM_DISCORD_TOKEN}].
+        channel_id  : int, required. Discord channel ID
+            [default: ${TQDM_DISCORD_CHANNEL_ID}].
+
+        See `tqdm.auto.tqdm.__init__` for other parameters.
+        """
+        if not kwargs.get('disable'):
+            kwargs = kwargs.copy()
+            self.dio = DiscordIO(
+                kwargs.pop('token', getenv('TQDM_DISCORD_TOKEN')),
+                kwargs.pop('channel_id', getenv('TQDM_DISCORD_CHANNEL_ID')))
+        super().__init__(*args, **kwargs)
+
+    def display(self, **kwargs):
+        super().display(**kwargs)
+        fmt = self.format_dict
+        if fmt.get('bar_format', None):
+            fmt['bar_format'] = fmt['bar_format'].replace(
+                '<bar/>', '{bar:10u}').replace('{bar}', '{bar:10u}')
+        else:
+            fmt['bar_format'] = '{l_bar}{bar:10u}{r_bar}'
+        self.dio.write(self.format_meter(**fmt))
+
+    def clear(self, *args, **kwargs):
+        super().clear(*args, **kwargs)
+        if not self.disable:
+            self.dio.write("")
+
+    def close(self):
+        if self.disable:
+            return
+        super().close()
+        if not (self.leave or (self.leave is None and self.pos == 0)):
+            self.dio.delete()
+
+
+def tdrange(*args, **kwargs):
+    """Shortcut for `tqdm.contrib.discord.tqdm(range(*args), **kwargs)`."""
+    return tqdm_discord(range(*args), **kwargs)
+
+
+# Aliases
+tqdm = tqdm_discord
+trange = tdrange
diff --git a/venv/lib/python3.12/site-packages/tqdm/contrib/itertools.py b/venv/lib/python3.12/site-packages/tqdm/contrib/itertools.py
new file mode 100644
index 00000000..e67651a4
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/tqdm/contrib/itertools.py
@@ -0,0 +1,35 @@
+"""
+Thin wrappers around `itertools`.
+"""
+import itertools
+
+from ..auto import tqdm as tqdm_auto
+
+__author__ = {"github.com/": ["casperdcl"]}
+__all__ = ['product']
+
+
+def product(*iterables, **tqdm_kwargs):
+    """
+    Equivalent of `itertools.product`.
+
+    Parameters
+    ----------
+    tqdm_class  : [default: tqdm.auto.tqdm].
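A usage sketch (illustrative aside, not part of this changeset): when every input is sized, `total` is inferred as the product of the lengths.

    from tqdm.contrib.itertools import product

    # total is inferred as len(range(100)) * len("abc") = 300
    for n, c in product(range(100), "abc", desc="grid"):
        pass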
+ """ + kwargs = tqdm_kwargs.copy() + tqdm_class = kwargs.pop("tqdm_class", tqdm_auto) + try: + lens = list(map(len, iterables)) + except TypeError: + total = None + else: + total = 1 + for i in lens: + total *= i + kwargs.setdefault("total", total) + with tqdm_class(**kwargs) as t: + it = itertools.product(*iterables) + for i in it: + yield i + t.update() diff --git a/venv/lib/python3.12/site-packages/tqdm/contrib/logging.py b/venv/lib/python3.12/site-packages/tqdm/contrib/logging.py new file mode 100644 index 00000000..e06febe3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/contrib/logging.py @@ -0,0 +1,126 @@ +""" +Helper functionality for interoperability with stdlib `logging`. +""" +import logging +import sys +from contextlib import contextmanager + +try: + from typing import Iterator, List, Optional, Type # noqa: F401 +except ImportError: + pass + +from ..std import tqdm as std_tqdm + + +class _TqdmLoggingHandler(logging.StreamHandler): + def __init__( + self, + tqdm_class=std_tqdm # type: Type[std_tqdm] + ): + super().__init__() + self.tqdm_class = tqdm_class + + def emit(self, record): + try: + msg = self.format(record) + self.tqdm_class.write(msg, file=self.stream) + self.flush() + except (KeyboardInterrupt, SystemExit): + raise + except: # noqa pylint: disable=bare-except + self.handleError(record) + + +def _is_console_logging_handler(handler): + return (isinstance(handler, logging.StreamHandler) + and handler.stream in {sys.stdout, sys.stderr}) + + +def _get_first_found_console_logging_handler(handlers): + for handler in handlers: + if _is_console_logging_handler(handler): + return handler + + +@contextmanager +def logging_redirect_tqdm( + loggers=None, # type: Optional[List[logging.Logger]], + tqdm_class=std_tqdm # type: Type[std_tqdm] +): + # type: (...) -> Iterator[None] + """ + Context manager redirecting console logging to `tqdm.write()`, leaving + other logging handlers (e.g. log files) unaffected. + + Parameters + ---------- + loggers : list, optional + Which handlers to redirect (default: [logging.root]). + tqdm_class : optional + + Example + ------- + ```python + import logging + from tqdm import trange + from tqdm.contrib.logging import logging_redirect_tqdm + + LOG = logging.getLogger(__name__) + + if __name__ == '__main__': + logging.basicConfig(level=logging.INFO) + with logging_redirect_tqdm(): + for i in trange(9): + if i == 4: + LOG.info("console logging redirected to `tqdm.write()`") + # logging restored + ``` + """ + if loggers is None: + loggers = [logging.root] + original_handlers_list = [logger.handlers for logger in loggers] + try: + for logger in loggers: + tqdm_handler = _TqdmLoggingHandler(tqdm_class) + orig_handler = _get_first_found_console_logging_handler(logger.handlers) + if orig_handler is not None: + tqdm_handler.setFormatter(orig_handler.formatter) + tqdm_handler.stream = orig_handler.stream + logger.handlers = [ + handler for handler in logger.handlers + if not _is_console_logging_handler(handler)] + [tqdm_handler] + yield + finally: + for logger, original_handlers in zip(loggers, original_handlers_list): + logger.handlers = original_handlers + + +@contextmanager +def tqdm_logging_redirect( + *args, + # loggers=None, # type: Optional[List[logging.Logger]] + # tqdm=None, # type: Optional[Type[tqdm.tqdm]] + **kwargs +): + # type: (...) 
-> Iterator[None] + """ + Convenience shortcut for: + ```python + with tqdm_class(*args, **tqdm_kwargs) as pbar: + with logging_redirect_tqdm(loggers=loggers, tqdm_class=tqdm_class): + yield pbar + ``` + + Parameters + ---------- + tqdm_class : optional, (default: tqdm.std.tqdm). + loggers : optional, list. + **tqdm_kwargs : passed to `tqdm_class`. + """ + tqdm_kwargs = kwargs.copy() + loggers = tqdm_kwargs.pop('loggers', None) + tqdm_class = tqdm_kwargs.pop('tqdm_class', std_tqdm) + with tqdm_class(*args, **tqdm_kwargs) as pbar: + with logging_redirect_tqdm(loggers=loggers, tqdm_class=tqdm_class): + yield pbar diff --git a/venv/lib/python3.12/site-packages/tqdm/contrib/slack.py b/venv/lib/python3.12/site-packages/tqdm/contrib/slack.py new file mode 100644 index 00000000..9bca8ee9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/contrib/slack.py @@ -0,0 +1,120 @@ +""" +Sends updates to a Slack app. + +Usage: +>>> from tqdm.contrib.slack import tqdm, trange +>>> for i in trange(10, token='{token}', channel='{channel}'): +... ... + +![screenshot](https://tqdm.github.io/img/screenshot-slack.png) +""" +import logging +from os import getenv + +try: + from slack_sdk import WebClient +except ImportError: + raise ImportError("Please `pip install slack-sdk`") + +from ..auto import tqdm as tqdm_auto +from .utils_worker import MonoWorker + +__author__ = {"github.com/": ["0x2b3bfa0", "casperdcl"]} +__all__ = ['SlackIO', 'tqdm_slack', 'tsrange', 'tqdm', 'trange'] + + +class SlackIO(MonoWorker): + """Non-blocking file-like IO using a Slack app.""" + def __init__(self, token, channel): + """Creates a new message in the given `channel`.""" + super().__init__() + self.client = WebClient(token=token) + self.text = self.__class__.__name__ + try: + self.message = self.client.chat_postMessage(channel=channel, text=self.text) + except Exception as e: + tqdm_auto.write(str(e)) + self.message = None + + def write(self, s): + """Replaces internal `message`'s text with `s`.""" + if not s: + s = "..." + s = s.replace('\r', '').strip() + if s == self.text: + return # skip duplicate message + message = self.message + if message is None: + return + self.text = s + try: + future = self.submit(self.client.chat_update, channel=message['channel'], + ts=message['ts'], text='`' + s + '`') + except Exception as e: + tqdm_auto.write(str(e)) + else: + return future + + +class tqdm_slack(tqdm_auto): + """ + Standard `tqdm.auto.tqdm` but also sends updates to a Slack app. + May take a few seconds to create (`__init__`). + + - create a Slack app with the `chat:write` scope & invite it to a + channel: + - copy the bot `{token}` & `{channel}` and paste below + >>> from tqdm.contrib.slack import tqdm, trange + >>> for i in tqdm(iterable, token='{token}', channel='{channel}'): + ... ... + """ + def __init__(self, *args, **kwargs): + """ + Parameters + ---------- + token : str, required. Slack token + [default: ${TQDM_SLACK_TOKEN}]. + channel : int, required. Slack channel + [default: ${TQDM_SLACK_CHANNEL}]. + mininterval : float, optional. + Minimum of [default: 1.5] to avoid rate limit. + + See `tqdm.auto.tqdm.__init__` for other parameters. 
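For orientation, a usage sketch (illustrative aside, not part of this changeset; assumes slack-sdk is installed, and the exact channel value depends on the Slack workspace) of driving the bar purely from the environment, as `tqdm.contrib.bells` does:

    # export TQDM_SLACK_TOKEN='xoxb-...'
    # export TQDM_SLACK_CHANNEL=...
    from tqdm.contrib.slack import trange

    for _ in trange(100, desc="upload"):
        pass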
+ """ + if not kwargs.get('disable'): + kwargs = kwargs.copy() + logging.getLogger("HTTPClient").setLevel(logging.WARNING) + self.sio = SlackIO( + kwargs.pop('token', getenv("TQDM_SLACK_TOKEN")), + kwargs.pop('channel', getenv("TQDM_SLACK_CHANNEL"))) + kwargs['mininterval'] = max(1.5, kwargs.get('mininterval', 1.5)) + super().__init__(*args, **kwargs) + + def display(self, **kwargs): + super().display(**kwargs) + fmt = self.format_dict + if fmt.get('bar_format', None): + fmt['bar_format'] = fmt['bar_format'].replace( + '', '`{bar:10}`').replace('{bar}', '`{bar:10u}`') + else: + fmt['bar_format'] = '{l_bar}`{bar:10}`{r_bar}' + if fmt['ascii'] is False: + fmt['ascii'] = [":black_square:", ":small_blue_diamond:", ":large_blue_diamond:", + ":large_blue_square:"] + fmt['ncols'] = 336 + self.sio.write(self.format_meter(**fmt)) + + def clear(self, *args, **kwargs): + super().clear(*args, **kwargs) + if not self.disable: + self.sio.write("") + + +def tsrange(*args, **kwargs): + """Shortcut for `tqdm.contrib.slack.tqdm(range(*args), **kwargs)`.""" + return tqdm_slack(range(*args), **kwargs) + + +# Aliases +tqdm = tqdm_slack +trange = tsrange diff --git a/venv/lib/python3.12/site-packages/tqdm/contrib/telegram.py b/venv/lib/python3.12/site-packages/tqdm/contrib/telegram.py new file mode 100644 index 00000000..01915180 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/contrib/telegram.py @@ -0,0 +1,153 @@ +""" +Sends updates to a Telegram bot. + +Usage: +>>> from tqdm.contrib.telegram import tqdm, trange +>>> for i in trange(10, token='{token}', chat_id='{chat_id}'): +... ... + +![screenshot](https://tqdm.github.io/img/screenshot-telegram.gif) +""" +from os import getenv +from warnings import warn + +from requests import Session + +from ..auto import tqdm as tqdm_auto +from ..std import TqdmWarning +from .utils_worker import MonoWorker + +__author__ = {"github.com/": ["casperdcl"]} +__all__ = ['TelegramIO', 'tqdm_telegram', 'ttgrange', 'tqdm', 'trange'] + + +class TelegramIO(MonoWorker): + """Non-blocking file-like IO using a Telegram Bot.""" + API = 'https://api.telegram.org/bot' + + def __init__(self, token, chat_id): + """Creates a new message in the given `chat_id`.""" + super().__init__() + self.token = token + self.chat_id = chat_id + self.session = Session() + self.text = self.__class__.__name__ + self.message_id + + @property + def message_id(self): + if hasattr(self, '_message_id'): + return self._message_id + try: + res = self.session.post( + self.API + '%s/sendMessage' % self.token, + data={'text': '`' + self.text + '`', 'chat_id': self.chat_id, + 'parse_mode': 'MarkdownV2'}).json() + except Exception as e: + tqdm_auto.write(str(e)) + else: + if res.get('error_code') == 429: + warn("Creation rate limit: try increasing `mininterval`.", + TqdmWarning, stacklevel=2) + else: + self._message_id = res['result']['message_id'] + return self._message_id + + def write(self, s): + """Replaces internal `message_id`'s text with `s`.""" + if not s: + s = "..." 
+        s = s.replace('\r', '').strip()
+        if s == self.text:
+            return  # avoid duplicate message Bot error
+        message_id = self.message_id
+        if message_id is None:
+            return
+        self.text = s
+        try:
+            future = self.submit(
+                self.session.post, self.API + '%s/editMessageText' % self.token,
+                data={'text': '`' + s + '`', 'chat_id': self.chat_id,
+                      'message_id': message_id, 'parse_mode': 'MarkdownV2'})
+        except Exception as e:
+            tqdm_auto.write(str(e))
+        else:
+            return future
+
+    def delete(self):
+        """Deletes internal `message_id`."""
+        try:
+            future = self.submit(
+                self.session.post, self.API + '%s/deleteMessage' % self.token,
+                data={'chat_id': self.chat_id, 'message_id': self.message_id})
+        except Exception as e:
+            tqdm_auto.write(str(e))
+        else:
+            return future
+
+
+class tqdm_telegram(tqdm_auto):
+    """
+    Standard `tqdm.auto.tqdm` but also sends updates to a Telegram Bot.
+    May take a few seconds to create (`__init__`).
+
+    - create a bot
+    - copy its `{token}`
+    - add the bot to a chat and send it a message such as `/start`
+    - go to <https://api.telegram.org/bot{token}/getUpdates> to find out
+      the `{chat_id}`
+    - paste the `{token}` & `{chat_id}` below
+
+    >>> from tqdm.contrib.telegram import tqdm, trange
+    >>> for i in tqdm(iterable, token='{token}', chat_id='{chat_id}'):
+    ...     ...
+    """
+    def __init__(self, *args, **kwargs):
+        """
+        Parameters
+        ----------
+        token  : str, required. Telegram token
+            [default: ${TQDM_TELEGRAM_TOKEN}].
+        chat_id  : str, required. Telegram chat ID
+            [default: ${TQDM_TELEGRAM_CHAT_ID}].
+
+        See `tqdm.auto.tqdm.__init__` for other parameters.
+        """
+        if not kwargs.get('disable'):
+            kwargs = kwargs.copy()
+            self.tgio = TelegramIO(
+                kwargs.pop('token', getenv('TQDM_TELEGRAM_TOKEN')),
+                kwargs.pop('chat_id', getenv('TQDM_TELEGRAM_CHAT_ID')))
+        super().__init__(*args, **kwargs)
+
+    def display(self, **kwargs):
+        super().display(**kwargs)
+        fmt = self.format_dict
+        if fmt.get('bar_format', None):
+            fmt['bar_format'] = fmt['bar_format'].replace(
+                '<bar/>', '{bar:10u}').replace('{bar}', '{bar:10u}')
+        else:
+            fmt['bar_format'] = '{l_bar}{bar:10u}{r_bar}'
+        self.tgio.write(self.format_meter(**fmt))
+
+    def clear(self, *args, **kwargs):
+        super().clear(*args, **kwargs)
+        if not self.disable:
+            self.tgio.write("")
+
+    def close(self):
+        if self.disable:
+            return
+        super().close()
+        if not (self.leave or (self.leave is None and self.pos == 0)):
+            self.tgio.delete()
+
+
+def ttgrange(*args, **kwargs):
+    """Shortcut for `tqdm.contrib.telegram.tqdm(range(*args), **kwargs)`."""
+    return tqdm_telegram(range(*args), **kwargs)
+
+
+# Aliases
+tqdm = tqdm_telegram
+trange = ttgrange
diff --git a/venv/lib/python3.12/site-packages/tqdm/contrib/utils_worker.py b/venv/lib/python3.12/site-packages/tqdm/contrib/utils_worker.py
new file mode 100644
index 00000000..2a03a2a8
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/tqdm/contrib/utils_worker.py
@@ -0,0 +1,38 @@
+"""
+IO/concurrency helpers for `tqdm.contrib`.
+"""
+from collections import deque
+from concurrent.futures import ThreadPoolExecutor
+
+from ..auto import tqdm as tqdm_auto
+
+__author__ = {"github.com/": ["casperdcl"]}
+__all__ = ['MonoWorker']
+
+
+class MonoWorker(object):
+    """
+    Supports one running task and one waiting task.
+    The waiting task is the most recent submitted (others are discarded).
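A usage sketch of the discard semantics (illustrative aside, not part of this changeset):

    from time import sleep
    from tqdm.contrib.utils_worker import MonoWorker

    worker = MonoWorker()
    for i in range(10):
        worker.submit(sleep, 0.1)  # each new call replaces the queued one
    # at most the first (running) and last (waiting) submissions execute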
+ """ + def __init__(self): + self.pool = ThreadPoolExecutor(max_workers=1) + self.futures = deque([], 2) + + def submit(self, func, *args, **kwargs): + """`func(*args, **kwargs)` may replace currently waiting task.""" + futures = self.futures + if len(futures) == futures.maxlen: + running = futures.popleft() + if not running.done(): + if len(futures): # clear waiting + waiting = futures.pop() + waiting.cancel() + futures.appendleft(running) # re-insert running + try: + waiting = self.pool.submit(func, *args, **kwargs) + except Exception as e: + tqdm_auto.write(str(e)) + else: + futures.append(waiting) + return waiting diff --git a/venv/lib/python3.12/site-packages/tqdm/dask.py b/venv/lib/python3.12/site-packages/tqdm/dask.py new file mode 100644 index 00000000..57f1b668 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/dask.py @@ -0,0 +1,44 @@ +from functools import partial + +from dask.callbacks import Callback + +from .auto import tqdm as tqdm_auto + +__author__ = {"github.com/": ["casperdcl"]} +__all__ = ['TqdmCallback'] + + +class TqdmCallback(Callback): + """Dask callback for task progress.""" + def __init__(self, start=None, pretask=None, tqdm_class=tqdm_auto, + **tqdm_kwargs): + """ + Parameters + ---------- + tqdm_class : optional + `tqdm` class to use for bars [default: `tqdm.auto.tqdm`]. + tqdm_kwargs : optional + Any other arguments used for all bars. + """ + super().__init__(start=start, pretask=pretask) + if tqdm_kwargs: + tqdm_class = partial(tqdm_class, **tqdm_kwargs) + self.tqdm_class = tqdm_class + + def _start_state(self, _, state): + self.pbar = self.tqdm_class(total=sum( + len(state[k]) for k in ['ready', 'waiting', 'running', 'finished'])) + + def _posttask(self, *_, **__): + self.pbar.update() + + def _finish(self, *_, **__): + self.pbar.close() + + def display(self): + """Displays in the current cell in Notebooks.""" + container = getattr(self.bar, 'container', None) + if container is None: + return + from .notebook import display + display(container) diff --git a/venv/lib/python3.12/site-packages/tqdm/gui.py b/venv/lib/python3.12/site-packages/tqdm/gui.py new file mode 100644 index 00000000..cb52fb91 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/gui.py @@ -0,0 +1,179 @@ +""" +Matplotlib GUI progressbar decorator for iterators. + +Usage: +>>> from tqdm.gui import trange, tqdm +>>> for i in trange(10): +... ... +""" +# future division is important to divide integers and get as +# a result precise floating numbers (instead of truncated int) +import re +from warnings import warn + +# to inherit from the tqdm class +from .std import TqdmExperimentalWarning +from .std import tqdm as std_tqdm + +# import compatibility functions and utilities + +__author__ = {"github.com/": ["casperdcl", "lrq3000"]} +__all__ = ['tqdm_gui', 'tgrange', 'tqdm', 'trange'] + + +class tqdm_gui(std_tqdm): # pragma: no cover + """Experimental Matplotlib GUI version of tqdm!""" + # TODO: @classmethod: write() on GUI? 
+    def __init__(self, *args, **kwargs):
+        from collections import deque
+
+        import matplotlib as mpl
+        import matplotlib.pyplot as plt
+        kwargs = kwargs.copy()
+        kwargs['gui'] = True
+        colour = kwargs.pop('colour', 'g')
+        super().__init__(*args, **kwargs)
+
+        if self.disable:
+            return
+
+        warn("GUI is experimental/alpha", TqdmExperimentalWarning, stacklevel=2)
+        self.mpl = mpl
+        self.plt = plt
+
+        # Remember if external environment uses toolbars
+        self.toolbar = self.mpl.rcParams['toolbar']
+        self.mpl.rcParams['toolbar'] = 'None'
+
+        self.mininterval = max(self.mininterval, 0.5)
+        self.fig, ax = plt.subplots(figsize=(9, 2.2))
+        # self.fig.subplots_adjust(bottom=0.2)
+        total = self.__len__()  # avoids TypeError on None #971
+        if total is not None:
+            self.xdata = []
+            self.ydata = []
+            self.zdata = []
+        else:
+            self.xdata = deque([])
+            self.ydata = deque([])
+            self.zdata = deque([])
+        self.line1, = ax.plot(self.xdata, self.ydata, color='b')
+        self.line2, = ax.plot(self.xdata, self.zdata, color='k')
+        ax.set_ylim(0, 0.001)
+        if total is not None:
+            ax.set_xlim(0, 100)
+            ax.set_xlabel("percent")
+            self.fig.legend((self.line1, self.line2), ("cur", "est"),
+                            loc='center right')
+            # progressbar
+            self.hspan = plt.axhspan(0, 0.001, xmin=0, xmax=0, color=colour)
+        else:
+            # ax.set_xlim(-60, 0)
+            ax.set_xlim(0, 60)
+            ax.invert_xaxis()
+            ax.set_xlabel("seconds")
+            ax.legend(("cur", "est"), loc='lower left')
+        ax.grid()
+        # ax.set_xlabel('seconds')
+        ax.set_ylabel((self.unit if self.unit else "it") + "/s")
+        if self.unit_scale:
+            plt.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
+            ax.yaxis.get_offset_text().set_x(-0.15)
+
+        # Remember if external environment is interactive
+        self.wasion = plt.isinteractive()
+        plt.ion()
+        self.ax = ax
+
+    def close(self):
+        if self.disable:
+            return
+
+        self.disable = True
+
+        with self.get_lock():
+            self._instances.remove(self)
+
+        # Restore toolbars
+        self.mpl.rcParams['toolbar'] = self.toolbar
+        # Return to non-interactive mode
+        if not self.wasion:
+            self.plt.ioff()
+        if self.leave:
+            self.display()
+        else:
+            self.plt.close(self.fig)
+
+    def clear(self, *_, **__):
+        pass
+
+    def display(self, *_, **__):
+        n = self.n
+        cur_t = self._time()
+        elapsed = cur_t - self.start_t
+        delta_it = n - self.last_print_n
+        delta_t = cur_t - self.last_print_t
+
+        # Inline due to multiple calls
+        total = self.total
+        xdata = self.xdata
+        ydata = self.ydata
+        zdata = self.zdata
+        ax = self.ax
+        line1 = self.line1
+        line2 = self.line2
+        hspan = getattr(self, 'hspan', None)
+        # instantaneous rate
+        y = delta_it / delta_t
+        # overall rate
+        z = n / elapsed
+        # update line data
+        xdata.append(n * 100.0 / total if total else cur_t)
+        ydata.append(y)
+        zdata.append(z)
+
+        # Discard old values
+        # xmin, xmax = ax.get_xlim()
+        # if (not total) and elapsed > xmin * 1.1:
+        if (not total) and elapsed > 66:
+            xdata.popleft()
+            ydata.popleft()
+            zdata.popleft()
+
+        ymin, ymax = ax.get_ylim()
+        if y > ymax or z > ymax:
+            ymax = 1.1 * y
+            ax.set_ylim(ymin, ymax)
+            ax.figure.canvas.draw()
+
+        if total:
+            line1.set_data(xdata, ydata)
+            line2.set_data(xdata, zdata)
+            if hspan:
+                hspan.set_xy((0, ymin))
+                hspan.set_height(ymax - ymin)
+                hspan.set_width(n / total)
+        else:
+            t_ago = [cur_t - i for i in xdata]
+            line1.set_data(t_ago, ydata)
+            line2.set_data(t_ago, zdata)
+
+        d = self.format_dict
+        # remove {bar}
+        d['bar_format'] = (d['bar_format'] or "{l_bar}<bar/>{r_bar}").replace(
+            "{bar}", "<bar/>")
+        msg = self.format_meter(**d)
+        if '<bar/>' in msg:
+            msg = "".join(re.split(r'\|?<bar/>\|?', msg, maxsplit=1))
ax.set_title(msg, fontname="DejaVu Sans Mono", fontsize=11) + self.plt.pause(1e-9) + + +def tgrange(*args, **kwargs): + """Shortcut for `tqdm.gui.tqdm(range(*args), **kwargs)`.""" + return tqdm_gui(range(*args), **kwargs) + + +# Aliases +tqdm = tqdm_gui +trange = tgrange diff --git a/venv/lib/python3.12/site-packages/tqdm/keras.py b/venv/lib/python3.12/site-packages/tqdm/keras.py new file mode 100644 index 00000000..cce9467c --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/keras.py @@ -0,0 +1,122 @@ +from copy import copy +from functools import partial + +from .auto import tqdm as tqdm_auto + +try: + import keras +except (ImportError, AttributeError) as e: + try: + from tensorflow import keras + except ImportError: + raise e +__author__ = {"github.com/": ["casperdcl"]} +__all__ = ['TqdmCallback'] + + +class TqdmCallback(keras.callbacks.Callback): + """Keras callback for epoch and batch progress.""" + @staticmethod + def bar2callback(bar, pop=None, delta=(lambda logs: 1)): + def callback(_, logs=None): + n = delta(logs) + if logs: + if pop: + logs = copy(logs) + [logs.pop(i, 0) for i in pop] + bar.set_postfix(logs, refresh=False) + bar.update(n) + + return callback + + def __init__(self, epochs=None, data_size=None, batch_size=None, verbose=1, + tqdm_class=tqdm_auto, **tqdm_kwargs): + """ + Parameters + ---------- + epochs : int, optional + data_size : int, optional + Number of training pairs. + batch_size : int, optional + Number of training pairs per batch. + verbose : int + 0: epoch, 1: batch (transient), 2: batch. [default: 1]. + Will be set to `0` unless both `data_size` and `batch_size` + are given. + tqdm_class : optional + `tqdm` class to use for bars [default: `tqdm.auto.tqdm`]. + tqdm_kwargs : optional + Any other arguments used for all bars. 
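+
+        Examples
+        --------
+        A minimal sketch (assumes an already-compiled Keras `model` and
+        training data `X`, `y`; neither is defined in this module):
+
+        >>> model.fit(X, y, epochs=10, verbose=0,
+        ...           callbacks=[TqdmCallback(verbose=1)])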
+ """ + if tqdm_kwargs: + tqdm_class = partial(tqdm_class, **tqdm_kwargs) + self.tqdm_class = tqdm_class + self.epoch_bar = tqdm_class(total=epochs, unit='epoch') + self.on_epoch_end = self.bar2callback(self.epoch_bar) + if data_size and batch_size: + self.batches = batches = (data_size + batch_size - 1) // batch_size + else: + self.batches = batches = None + self.verbose = verbose + if verbose == 1: + self.batch_bar = tqdm_class(total=batches, unit='batch', leave=False) + self.on_batch_end = self.bar2callback( + self.batch_bar, pop=['batch', 'size'], + delta=lambda logs: logs.get('size', 1)) + + def on_train_begin(self, *_, **__): + params = self.params.get + auto_total = params('epochs', params('nb_epoch', None)) + if auto_total is not None and auto_total != self.epoch_bar.total: + self.epoch_bar.reset(total=auto_total) + + def on_epoch_begin(self, epoch, *_, **__): + if self.epoch_bar.n < epoch: + ebar = self.epoch_bar + ebar.n = ebar.last_print_n = ebar.initial = epoch + if self.verbose: + params = self.params.get + total = params('samples', params( + 'nb_sample', params('steps', None))) or self.batches + if self.verbose == 2: + if hasattr(self, 'batch_bar'): + self.batch_bar.close() + self.batch_bar = self.tqdm_class( + total=total, unit='batch', leave=True, + unit_scale=1 / (params('batch_size', 1) or 1)) + self.on_batch_end = self.bar2callback( + self.batch_bar, pop=['batch', 'size'], + delta=lambda logs: logs.get('size', 1)) + elif self.verbose == 1: + self.batch_bar.unit_scale = 1 / (params('batch_size', 1) or 1) + self.batch_bar.reset(total=total) + else: + raise KeyError('Unknown verbosity') + + def on_train_end(self, *_, **__): + if hasattr(self, 'batch_bar'): + self.batch_bar.close() + self.epoch_bar.close() + + def display(self): + """Displays in the current cell in Notebooks.""" + container = getattr(self.epoch_bar, 'container', None) + if container is None: + return + from .notebook import display + display(container) + batch_bar = getattr(self, 'batch_bar', None) + if batch_bar is not None: + display(batch_bar.container) + + @staticmethod + def _implements_train_batch_hooks(): + return True + + @staticmethod + def _implements_test_batch_hooks(): + return True + + @staticmethod + def _implements_predict_batch_hooks(): + return True diff --git a/venv/lib/python3.12/site-packages/tqdm/notebook.py b/venv/lib/python3.12/site-packages/tqdm/notebook.py new file mode 100644 index 00000000..77b91bdd --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/notebook.py @@ -0,0 +1,317 @@ +""" +IPython/Jupyter Notebook progressbar decorator for iterators. +Includes a default `range` iterator printing to `stderr`. + +Usage: +>>> from tqdm.notebook import trange, tqdm +>>> for i in trange(10): +... ... 
+""" +# import compatibility functions and utilities +import re +import sys +from html import escape +from weakref import proxy + +# to inherit from the tqdm class +from .std import tqdm as std_tqdm + +if True: # pragma: no cover + # import IPython/Jupyter base widget and display utilities + IPY = 0 + try: # IPython 4.x + import ipywidgets + IPY = 4 + except ImportError: # IPython 3.x / 2.x + IPY = 32 + import warnings + with warnings.catch_warnings(): + warnings.filterwarnings( + 'ignore', message=".*The `IPython.html` package has been deprecated.*") + try: + import IPython.html.widgets as ipywidgets # NOQA: F401 + except ImportError: + pass + + try: # IPython 4.x / 3.x + if IPY == 32: + from IPython.html.widgets import HTML + from IPython.html.widgets import FloatProgress as IProgress + from IPython.html.widgets import HBox + IPY = 3 + else: + from ipywidgets import HTML + from ipywidgets import FloatProgress as IProgress + from ipywidgets import HBox + except ImportError: + try: # IPython 2.x + from IPython.html.widgets import HTML + from IPython.html.widgets import ContainerWidget as HBox + from IPython.html.widgets import FloatProgressWidget as IProgress + IPY = 2 + except ImportError: + IPY = 0 + IProgress = None + HBox = object + + try: + from IPython.display import display # , clear_output + except ImportError: + pass + +__author__ = {"github.com/": ["lrq3000", "casperdcl", "alexanderkuk"]} +__all__ = ['tqdm_notebook', 'tnrange', 'tqdm', 'trange'] +WARN_NOIPYW = ("IProgress not found. Please update jupyter and ipywidgets." + " See https://ipywidgets.readthedocs.io/en/stable" + "/user_install.html") + + +class TqdmHBox(HBox): + """`ipywidgets.HBox` with a pretty representation""" + def _json_(self, pretty=None): + pbar = getattr(self, 'pbar', None) + if pbar is None: + return {} + d = pbar.format_dict + if pretty is not None: + d["ascii"] = not pretty + return d + + def __repr__(self, pretty=False): + pbar = getattr(self, 'pbar', None) + if pbar is None: + return super().__repr__() + return pbar.format_meter(**self._json_(pretty)) + + def _repr_pretty_(self, pp, *_, **__): + pp.text(self.__repr__(True)) + + +class tqdm_notebook(std_tqdm): + """ + Experimental IPython/Jupyter Notebook widget using tqdm! + """ + @staticmethod + def status_printer(_, total=None, desc=None, ncols=None): + """ + Manage the printing of an IPython/Jupyter Notebook progress bar widget. + """ + # Fallback to text bar if there's no total + # DEPRECATED: replaced with an 'info' style bar + # if not total: + # return super(tqdm_notebook, tqdm_notebook).status_printer(file) + + # fp = file + + # Prepare IPython progress bar + if IProgress is None: # #187 #451 #558 #872 + raise ImportError(WARN_NOIPYW) + if total: + pbar = IProgress(min=0, max=total) + else: # No total? 
Show info style bar with no progress tqdm status
+            pbar = IProgress(min=0, max=1)
+            pbar.value = 1
+            pbar.bar_style = 'info'
+            if ncols is None:
+                pbar.layout.width = "20px"
+
+        ltext = HTML()
+        rtext = HTML()
+        if desc:
+            ltext.value = desc
+        container = TqdmHBox(children=[ltext, pbar, rtext])
+        # Prepare layout
+        if ncols is not None:  # use default style of ipywidgets
+            # ncols could be 100, "100px", "100%"
+            ncols = str(ncols)  # ipywidgets only accepts string
+            try:
+                if int(ncols) > 0:  # isnumeric and positive
+                    ncols += 'px'
+            except ValueError:
+                pass
+            pbar.layout.flex = '2'
+            container.layout.width = ncols
+            container.layout.display = 'inline-flex'
+            container.layout.flex_flow = 'row wrap'
+
+        return container
+
+    def display(self, msg=None, pos=None,
+                # additional signals
+                close=False, bar_style=None, check_delay=True):
+        # Note: contrary to native tqdm, msg='' does NOT clear bar
+        # goal is to keep all infos if error happens so user knows
+        # at which iteration the loop failed.
+
+        # Clear previous output (really necessary?)
+        # clear_output(wait=1)
+
+        if not msg and not close:
+            d = self.format_dict
+            # remove {bar}
+            d['bar_format'] = (d['bar_format'] or "{l_bar}{r_bar}").replace(
+                "{bar}", "<bar/>")
+            msg = self.format_meter(**d)
+
+        ltext, pbar, rtext = self.container.children
+        pbar.value = self.n
+
+        if msg:
+            msg = msg.replace(' ', u'\u2007')  # fix html space padding
+            # html escape special characters (like '&')
+            if '<bar/>' in msg:
+                left, right = map(escape, re.split(r'\|?<bar/>\|?', msg, maxsplit=1))
+            else:
+                left, right = '', escape(msg)
+
+            # Update description
+            ltext.value = left
+            # never clear the bar (signal: msg='')
+            if right:
+                rtext.value = right
+
+        # Change bar style
+        if bar_style:
+            # Hack-ish way to avoid the danger bar_style being overridden by
+            # success because the bar gets closed after the error...
+            if pbar.bar_style != 'danger' or bar_style != 'success':
+                pbar.bar_style = bar_style
+
+        # Special signal to close the bar
+        if close and pbar.bar_style != 'danger':  # hide only if no error
+            try:
+                self.container.close()
+            except AttributeError:
+                self.container.visible = False
+                self.container.layout.visibility = 'hidden'  # IPYW>=8
+
+        if check_delay and self.delay > 0 and not self.displayed:
+            display(self.container)
+            self.displayed = True
+
+    @property
+    def colour(self):
+        if hasattr(self, 'container'):
+            return self.container.children[-2].style.bar_color
+
+    @colour.setter
+    def colour(self, bar_color):
+        if hasattr(self, 'container'):
+            self.container.children[-2].style.bar_color = bar_color
+
+    def __init__(self, *args, **kwargs):
+        """
+        Supports the usual `tqdm.tqdm` parameters as well as those listed below.
+
+        Parameters
+        ----------
+        display : Whether to call `display(self.container)` immediately
+            [default: True].
+ """ + kwargs = kwargs.copy() + # Setup default output + file_kwarg = kwargs.get('file', sys.stderr) + if file_kwarg is sys.stderr or file_kwarg is None: + kwargs['file'] = sys.stdout # avoid the red block in IPython + + # Initialize parent class + avoid printing by using gui=True + kwargs['gui'] = True + # convert disable = None to False + kwargs['disable'] = bool(kwargs.get('disable', False)) + colour = kwargs.pop('colour', None) + display_here = kwargs.pop('display', True) + super().__init__(*args, **kwargs) + if self.disable or not kwargs['gui']: + self.disp = lambda *_, **__: None + return + + # Get bar width + self.ncols = '100%' if self.dynamic_ncols else kwargs.get("ncols", None) + + # Replace with IPython progress bar display (with correct total) + unit_scale = 1 if self.unit_scale is True else self.unit_scale or 1 + total = self.total * unit_scale if self.total else self.total + self.container = self.status_printer(self.fp, total, self.desc, self.ncols) + self.container.pbar = proxy(self) + self.displayed = False + if display_here and self.delay <= 0: + display(self.container) + self.displayed = True + self.disp = self.display + self.colour = colour + + # Print initial bar state + if not self.disable: + self.display(check_delay=False) + + def __iter__(self): + try: + it = super().__iter__() + for obj in it: + # return super(tqdm...) will not catch exception + yield obj + # NB: except ... [ as ...] breaks IPython async KeyboardInterrupt + except: # NOQA + self.disp(bar_style='danger') + raise + # NB: don't `finally: close()` + # since this could be a shared bar which the user will `reset()` + + def update(self, n=1): + try: + return super().update(n=n) + # NB: except ... [ as ...] breaks IPython async KeyboardInterrupt + except: # NOQA + # cannot catch KeyboardInterrupt when using manual tqdm + # as the interrupt will most likely happen on another statement + self.disp(bar_style='danger') + raise + # NB: don't `finally: close()` + # since this could be a shared bar which the user will `reset()` + + def close(self): + if self.disable: + return + super().close() + # Try to detect if there was an error or KeyboardInterrupt + # in manual mode: if n < total, things probably got wrong + if self.total and self.n < self.total: + self.disp(bar_style='danger', check_delay=False) + else: + if self.leave: + self.disp(bar_style='success', check_delay=False) + else: + self.disp(close=True, check_delay=False) + + def clear(self, *_, **__): + pass + + def reset(self, total=None): + """ + Resets to 0 iterations for repeated use. + + Consider combining with `leave=True`. + + Parameters + ---------- + total : int or float, optional. Total to use for the new bar. + """ + if self.disable: + return super().reset(total=total) + _, pbar, _ = self.container.children + pbar.bar_style = '' + if total is not None: + pbar.max = total + if not self.total and self.ncols is None: # no longer unknown total + pbar.layout.width = None # reset width + return super().reset(total=total) + + +def tnrange(*args, **kwargs): + """Shortcut for `tqdm.notebook.tqdm(range(*args), **kwargs)`.""" + return tqdm_notebook(range(*args), **kwargs) + + +# Aliases +tqdm = tqdm_notebook +trange = tnrange diff --git a/venv/lib/python3.12/site-packages/tqdm/rich.py b/venv/lib/python3.12/site-packages/tqdm/rich.py new file mode 100644 index 00000000..3d392eda --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/rich.py @@ -0,0 +1,151 @@ +""" +`rich.progress` decorator for iterators. 
+ +Usage: +>>> from tqdm.rich import trange, tqdm +>>> for i in trange(10): +... ... +""" +from warnings import warn + +from rich.progress import ( + BarColumn, Progress, ProgressColumn, Text, TimeElapsedColumn, TimeRemainingColumn, filesize) + +from .std import TqdmExperimentalWarning +from .std import tqdm as std_tqdm + +__author__ = {"github.com/": ["casperdcl"]} +__all__ = ['tqdm_rich', 'trrange', 'tqdm', 'trange'] + + +class FractionColumn(ProgressColumn): + """Renders completed/total, e.g. '0.5/2.3 G'.""" + def __init__(self, unit_scale=False, unit_divisor=1000): + self.unit_scale = unit_scale + self.unit_divisor = unit_divisor + super().__init__() + + def render(self, task): + """Calculate common unit for completed and total.""" + completed = int(task.completed) + total = int(task.total) + if self.unit_scale: + unit, suffix = filesize.pick_unit_and_suffix( + total, + ["", "K", "M", "G", "T", "P", "E", "Z", "Y"], + self.unit_divisor, + ) + else: + unit, suffix = filesize.pick_unit_and_suffix(total, [""], 1) + precision = 0 if unit == 1 else 1 + return Text( + f"{completed/unit:,.{precision}f}/{total/unit:,.{precision}f} {suffix}", + style="progress.download") + + +class RateColumn(ProgressColumn): + """Renders human readable transfer speed.""" + def __init__(self, unit="", unit_scale=False, unit_divisor=1000): + self.unit = unit + self.unit_scale = unit_scale + self.unit_divisor = unit_divisor + super().__init__() + + def render(self, task): + """Show data transfer speed.""" + speed = task.speed + if speed is None: + return Text(f"? {self.unit}/s", style="progress.data.speed") + if self.unit_scale: + unit, suffix = filesize.pick_unit_and_suffix( + speed, + ["", "K", "M", "G", "T", "P", "E", "Z", "Y"], + self.unit_divisor, + ) + else: + unit, suffix = filesize.pick_unit_and_suffix(speed, [""], 1) + precision = 0 if unit == 1 else 1 + return Text(f"{speed/unit:,.{precision}f} {suffix}{self.unit}/s", + style="progress.data.speed") + + +class tqdm_rich(std_tqdm): # pragma: no cover + """Experimental rich.progress GUI version of tqdm!""" + # TODO: @classmethod: write()? + def __init__(self, *args, **kwargs): + """ + This class accepts the following parameters *in addition* to + the parameters accepted by `tqdm`. + + Parameters + ---------- + progress : tuple, optional + arguments for `rich.progress.Progress()`. + options : dict, optional + keyword arguments for `rich.progress.Progress()`. 
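+
+        A minimal usage sketch (assumes `rich` is installed; the `options`
+        value is illustrative):
+
+        >>> for i in tqdm_rich(range(100), options={'transient': False}):
+        ...     pass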
+ """ + kwargs = kwargs.copy() + kwargs['gui'] = True + # convert disable = None to False + kwargs['disable'] = bool(kwargs.get('disable', False)) + progress = kwargs.pop('progress', None) + options = kwargs.pop('options', {}).copy() + super().__init__(*args, **kwargs) + + if self.disable: + return + + warn("rich is experimental/alpha", TqdmExperimentalWarning, stacklevel=2) + d = self.format_dict + if progress is None: + progress = ( + "[progress.description]{task.description}" + "[progress.percentage]{task.percentage:>4.0f}%", + BarColumn(bar_width=None), + FractionColumn( + unit_scale=d['unit_scale'], unit_divisor=d['unit_divisor']), + "[", TimeElapsedColumn(), "<", TimeRemainingColumn(), + ",", RateColumn(unit=d['unit'], unit_scale=d['unit_scale'], + unit_divisor=d['unit_divisor']), "]" + ) + options.setdefault('transient', not self.leave) + self._prog = Progress(*progress, **options) + self._prog.__enter__() + self._task_id = self._prog.add_task(self.desc or "", **d) + + def close(self): + if self.disable: + return + self.display() # print 100%, vis #1306 + super().close() + self._prog.__exit__(None, None, None) + + def clear(self, *_, **__): + pass + + def display(self, *_, **__): + if not hasattr(self, '_prog'): + return + self._prog.update(self._task_id, completed=self.n, description=self.desc) + + def reset(self, total=None): + """ + Resets to 0 iterations for repeated use. + + Parameters + ---------- + total : int or float, optional. Total to use for the new bar. + """ + if hasattr(self, '_prog'): + self._prog.reset(total=total) + super().reset(total=total) + + +def trrange(*args, **kwargs): + """Shortcut for `tqdm.rich.tqdm(range(*args), **kwargs)`.""" + return tqdm_rich(range(*args), **kwargs) + + +# Aliases +tqdm = tqdm_rich +trange = trrange diff --git a/venv/lib/python3.12/site-packages/tqdm/std.py b/venv/lib/python3.12/site-packages/tqdm/std.py new file mode 100644 index 00000000..e91ad309 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/std.py @@ -0,0 +1,1524 @@ +""" +Customisable progressbar decorator for iterators. +Includes a default `range` iterator printing to `stderr`. + +Usage: +>>> from tqdm import trange, tqdm +>>> for i in trange(10): +... ... +""" +import sys +from collections import OrderedDict, defaultdict +from contextlib import contextmanager +from datetime import datetime, timedelta, timezone +from numbers import Number +from time import time +from warnings import warn +from weakref import WeakSet + +from ._monitor import TMonitor +from .utils import ( + CallbackIOWrapper, Comparable, DisableOnWriteError, FormatReplace, SimpleTextIOWrapper, + _is_ascii, _screen_shape_wrapper, _supports_unicode, _term_move_up, disp_len, disp_trim, + envwrap) + +__author__ = "https://github.com/tqdm/tqdm#contributions" +__all__ = ['tqdm', 'trange', + 'TqdmTypeError', 'TqdmKeyError', 'TqdmWarning', + 'TqdmExperimentalWarning', 'TqdmDeprecationWarning', + 'TqdmMonitorWarning'] + + +class TqdmTypeError(TypeError): + pass + + +class TqdmKeyError(KeyError): + pass + + +class TqdmWarning(Warning): + """base class for all tqdm warnings. + + Used for non-external-code-breaking errors, such as garbled printing. 
+ """ + def __init__(self, msg, fp_write=None, *a, **k): + if fp_write is not None: + fp_write("\n" + self.__class__.__name__ + ": " + str(msg).rstrip() + '\n') + else: + super().__init__(msg, *a, **k) + + +class TqdmExperimentalWarning(TqdmWarning, FutureWarning): + """beta feature, unstable API and behaviour""" + pass + + +class TqdmDeprecationWarning(TqdmWarning, DeprecationWarning): + # not suppressed if raised + pass + + +class TqdmMonitorWarning(TqdmWarning, RuntimeWarning): + """tqdm monitor errors which do not affect external functionality""" + pass + + +def TRLock(*args, **kwargs): + """threading RLock""" + try: + from threading import RLock + return RLock(*args, **kwargs) + except (ImportError, OSError): # pragma: no cover + pass + + +class TqdmDefaultWriteLock(object): + """ + Provide a default write lock for thread and multiprocessing safety. + Works only on platforms supporting `fork` (so Windows is excluded). + You must initialise a `tqdm` or `TqdmDefaultWriteLock` instance + before forking in order for the write lock to work. + On Windows, you need to supply the lock from the parent to the children as + an argument to joblib or the parallelism lib you use. + """ + # global thread lock so no setup required for multithreading. + # NB: Do not create multiprocessing lock as it sets the multiprocessing + # context, disallowing `spawn()`/`forkserver()` + th_lock = TRLock() + + def __init__(self): + # Create global parallelism locks to avoid racing issues with parallel + # bars works only if fork available (Linux/MacOSX, but not Windows) + cls = type(self) + root_lock = cls.th_lock + if root_lock is not None: + root_lock.acquire() + cls.create_mp_lock() + self.locks = [lk for lk in [cls.mp_lock, cls.th_lock] if lk is not None] + if root_lock is not None: + root_lock.release() + + def acquire(self, *a, **k): + for lock in self.locks: + lock.acquire(*a, **k) + + def release(self): + for lock in self.locks[::-1]: # Release in inverse order of acquisition + lock.release() + + def __enter__(self): + self.acquire() + + def __exit__(self, *exc): + self.release() + + @classmethod + def create_mp_lock(cls): + if not hasattr(cls, 'mp_lock'): + try: + from multiprocessing import RLock + cls.mp_lock = RLock() + except (ImportError, OSError): # pragma: no cover + cls.mp_lock = None + + @classmethod + def create_th_lock(cls): + assert hasattr(cls, 'th_lock') + warn("create_th_lock not needed anymore", TqdmDeprecationWarning, stacklevel=2) + + +class Bar(object): + """ + `str.format`-able bar with format specifiers: `[width][type]` + + - `width` + + unspecified (default): use `self.default_len` + + `int >= 0`: overrides `self.default_len` + + `int < 0`: subtract from `self.default_len` + - `type` + + `a`: ascii (`charset=self.ASCII` override) + + `u`: unicode (`charset=self.UTF` override) + + `b`: blank (`charset=" "` override) + """ + ASCII = " 123456789#" + UTF = u" " + u''.join(map(chr, range(0x258F, 0x2587, -1))) + BLANK = " " + COLOUR_RESET = '\x1b[0m' + COLOUR_RGB = '\x1b[38;2;%d;%d;%dm' + COLOURS = {'BLACK': '\x1b[30m', 'RED': '\x1b[31m', 'GREEN': '\x1b[32m', + 'YELLOW': '\x1b[33m', 'BLUE': '\x1b[34m', 'MAGENTA': '\x1b[35m', + 'CYAN': '\x1b[36m', 'WHITE': '\x1b[37m'} + + def __init__(self, frac, default_len=10, charset=UTF, colour=None): + if not 0 <= frac <= 1: + warn("clamping frac to range [0, 1]", TqdmWarning, stacklevel=2) + frac = max(0, min(1, frac)) + assert default_len > 0 + self.frac = frac + self.default_len = default_len + self.charset = charset + self.colour = colour + + 
@property + def colour(self): + return self._colour + + @colour.setter + def colour(self, value): + if not value: + self._colour = None + return + try: + if value.upper() in self.COLOURS: + self._colour = self.COLOURS[value.upper()] + elif value[0] == '#' and len(value) == 7: + self._colour = self.COLOUR_RGB % tuple( + int(i, 16) for i in (value[1:3], value[3:5], value[5:7])) + else: + raise KeyError + except (KeyError, AttributeError): + warn("Unknown colour (%s); valid choices: [hex (#00ff00), %s]" % ( + value, ", ".join(self.COLOURS)), + TqdmWarning, stacklevel=2) + self._colour = None + + def __format__(self, format_spec): + if format_spec: + _type = format_spec[-1].lower() + try: + charset = {'a': self.ASCII, 'u': self.UTF, 'b': self.BLANK}[_type] + except KeyError: + charset = self.charset + else: + format_spec = format_spec[:-1] + if format_spec: + N_BARS = int(format_spec) + if N_BARS < 0: + N_BARS += self.default_len + else: + N_BARS = self.default_len + else: + charset = self.charset + N_BARS = self.default_len + + nsyms = len(charset) - 1 + bar_length, frac_bar_length = divmod(int(self.frac * N_BARS * nsyms), nsyms) + + res = charset[-1] * bar_length + if bar_length < N_BARS: # whitespace padding + res = res + charset[frac_bar_length] + charset[0] * (N_BARS - bar_length - 1) + return self.colour + res + self.COLOUR_RESET if self.colour else res + + +class EMA(object): + """ + Exponential moving average: smoothing to give progressively lower + weights to older values. + + Parameters + ---------- + smoothing : float, optional + Smoothing factor in range [0, 1], [default: 0.3]. + Increase to give more weight to recent values. + Ranges from 0 (yields old value) to 1 (yields new value). + """ + def __init__(self, smoothing=0.3): + self.alpha = smoothing + self.last = 0 + self.calls = 0 + + def __call__(self, x=None): + """ + Parameters + ---------- + x : float + New value to include in EMA. + """ + beta = 1 - self.alpha + if x is not None: + self.last = self.alpha * x + beta * self.last + self.calls += 1 + return self.last / (1 - beta ** self.calls) if self.calls else self.last + + +class tqdm(Comparable): + """ + Decorate an iterable object, returning an iterator which acts exactly + like the original iterable, but prints a dynamically updating + progressbar every time a value is requested. + + Parameters + ---------- + iterable : iterable, optional + Iterable to decorate with a progressbar. + Leave blank to manually manage the updates. + desc : str, optional + Prefix for the progressbar. + total : int or float, optional + The number of expected iterations. If unspecified, + len(iterable) is used if possible. If float("inf") or as a last + resort, only basic progress statistics are displayed + (no ETA, no progressbar). + If `gui` is True and this parameter needs subsequent updating, + specify an initial arbitrary large positive number, + e.g. 9e9. + leave : bool, optional + If [default: True], keeps all traces of the progressbar + upon termination of iteration. + If `None`, will leave only if `position` is `0`. + file : `io.TextIOWrapper` or `io.StringIO`, optional + Specifies where to output the progress messages + (default: sys.stderr). Uses `file.write(str)` and `file.flush()` + methods. For encoding, see `write_bytes`. + ncols : int, optional + The width of the entire output message. If specified, + dynamically resizes the progressbar to stay within this bound. + If unspecified, attempts to use environment width. 
The + fallback is a meter width of 10 and no limit for the counter and + statistics. If 0, will not print any meter (only stats). + mininterval : float, optional + Minimum progress display update interval [default: 0.1] seconds. + maxinterval : float, optional + Maximum progress display update interval [default: 10] seconds. + Automatically adjusts `miniters` to correspond to `mininterval` + after long display update lag. Only works if `dynamic_miniters` + or monitor thread is enabled. + miniters : int or float, optional + Minimum progress display update interval, in iterations. + If 0 and `dynamic_miniters`, will automatically adjust to equal + `mininterval` (more CPU efficient, good for tight loops). + If > 0, will skip display of specified number of iterations. + Tweak this and `mininterval` to get very efficient loops. + If your progress is erratic with both fast and slow iterations + (network, skipping items, etc) you should set miniters=1. + ascii : bool or str, optional + If unspecified or False, use unicode (smooth blocks) to fill + the meter. The fallback is to use ASCII characters " 123456789#". + disable : bool, optional + Whether to disable the entire progressbar wrapper + [default: False]. If set to None, disable on non-TTY. + unit : str, optional + String that will be used to define the unit of each iteration + [default: it]. + unit_scale : bool or int or float, optional + If 1 or True, the number of iterations will be reduced/scaled + automatically and a metric prefix following the + International System of Units standard will be added + (kilo, mega, etc.) [default: False]. If any other non-zero + number, will scale `total` and `n`. + dynamic_ncols : bool, optional + If set, constantly alters `ncols` and `nrows` to the + environment (allowing for window resizes) [default: False]. + smoothing : float, optional + Exponential moving average smoothing factor for speed estimates + (ignored in GUI mode). Ranges from 0 (average speed) to 1 + (current/instantaneous speed) [default: 0.3]. + bar_format : str, optional + Specify a custom bar string formatting. May impact performance. + [default: '{l_bar}{bar}{r_bar}'], where + l_bar='{desc}: {percentage:3.0f}%|' and + r_bar='| {n_fmt}/{total_fmt} [{elapsed}<{remaining}, ' + '{rate_fmt}{postfix}]' + Possible vars: l_bar, bar, r_bar, n, n_fmt, total, total_fmt, + percentage, elapsed, elapsed_s, ncols, nrows, desc, unit, + rate, rate_fmt, rate_noinv, rate_noinv_fmt, + rate_inv, rate_inv_fmt, postfix, unit_divisor, + remaining, remaining_s, eta. + Note that a trailing ": " is automatically removed after {desc} + if the latter is empty. + initial : int or float, optional + The initial counter value. Useful when restarting a progress + bar [default: 0]. If using float, consider specifying `{n:.3f}` + or similar in `bar_format`, or specifying `unit_scale`. + position : int, optional + Specify the line offset to print this bar (starting from 0) + Automatic if unspecified. + Useful to manage multiple bars at once (eg, from threads). + postfix : dict or *, optional + Specify additional stats to display at the end of the bar. + Calls `set_postfix(**postfix)` if possible (dict). + unit_divisor : float, optional + [default: 1000], ignored unless `unit_scale` is True. + write_bytes : bool, optional + Whether to write bytes. If (default: False) will write unicode. + lock_args : tuple, optional + Passed to `refresh` for intermediate output + (initialisation, iterating, and updating). + nrows : int, optional + The screen height. 
If specified, hides nested bars outside this + bound. If unspecified, attempts to use environment height. + The fallback is 20. + colour : str, optional + Bar colour (e.g. 'green', '#00ff00'). + delay : float, optional + Don't display until [default: 0] seconds have elapsed. + gui : bool, optional + WARNING: internal parameter - do not use. + Use tqdm.gui.tqdm(...) instead. If set, will attempt to use + matplotlib animations for a graphical output [default: False]. + + Returns + ------- + out : decorated iterator. + """ + + monitor_interval = 10 # set to 0 to disable the thread + monitor = None + _instances = WeakSet() + + @staticmethod + def format_sizeof(num, suffix='', divisor=1000): + """ + Formats a number (greater than unity) with SI Order of Magnitude + prefixes. + + Parameters + ---------- + num : float + Number ( >= 1) to format. + suffix : str, optional + Post-postfix [default: '']. + divisor : float, optional + Divisor between prefixes [default: 1000]. + + Returns + ------- + out : str + Number with Order of Magnitude SI unit postfix. + """ + for unit in ['', 'k', 'M', 'G', 'T', 'P', 'E', 'Z']: + if abs(num) < 999.5: + if abs(num) < 99.95: + if abs(num) < 9.995: + return f'{num:1.2f}{unit}{suffix}' + return f'{num:2.1f}{unit}{suffix}' + return f'{num:3.0f}{unit}{suffix}' + num /= divisor + return f'{num:3.1f}Y{suffix}' + + @staticmethod + def format_interval(t): + """ + Formats a number of seconds as a clock time, [H:]MM:SS + + Parameters + ---------- + t : int + Number of seconds. + + Returns + ------- + out : str + [H:]MM:SS + """ + mins, s = divmod(int(t), 60) + h, m = divmod(mins, 60) + return f'{h:d}:{m:02d}:{s:02d}' if h else f'{m:02d}:{s:02d}' + + @staticmethod + def format_num(n): + """ + Intelligent scientific notation (.3g). + + Parameters + ---------- + n : int or float or Numeric + A Number. + + Returns + ------- + out : str + Formatted number. + """ + f = f'{n:.3g}'.replace('e+0', 'e+').replace('e-0', 'e-') + n = str(n) + return f if len(f) < len(n) else n + + @staticmethod + def status_printer(file): + """ + Manage the printing and in-place updating of a line of characters. + Note that if the string is longer than a line, then in-place + updating may not work (it will print a new line at each refresh). + """ + fp = file + fp_flush = getattr(fp, 'flush', lambda: None) # pragma: no cover + if fp in (sys.stderr, sys.stdout): + getattr(sys.stderr, 'flush', lambda: None)() + getattr(sys.stdout, 'flush', lambda: None)() + + def fp_write(s): + fp.write(str(s)) + fp_flush() + + last_len = [0] + + def print_status(s): + len_s = disp_len(s) + fp_write('\r' + s + (' ' * max(last_len[0] - len_s, 0))) + last_len[0] = len_s + + return print_status + + @staticmethod + def format_meter(n, total, elapsed, ncols=None, prefix='', ascii=False, unit='it', + unit_scale=False, rate=None, bar_format=None, postfix=None, + unit_divisor=1000, initial=0, colour=None, **extra_kwargs): + """ + Return a string-based progress bar given some parameters + + Parameters + ---------- + n : int or float + Number of finished iterations. + total : int or float + The expected total number of iterations. If meaningless (None), + only basic progress statistics are displayed (no ETA). + elapsed : float + Number of seconds passed since start. + ncols : int, optional + The width of the entire output message. If specified, + dynamically resizes `{bar}` to stay within this bound + [default: None]. If `0`, will not print any bar (only stats). + The fallback is `{bar:10}`. 
+ prefix : str, optional + Prefix message (included in total width) [default: '']. + Use as {desc} in bar_format string. + ascii : bool, optional or str, optional + If not set, use unicode (smooth blocks) to fill the meter + [default: False]. The fallback is to use ASCII characters + " 123456789#". + unit : str, optional + The iteration unit [default: 'it']. + unit_scale : bool or int or float, optional + If 1 or True, the number of iterations will be printed with an + appropriate SI metric prefix (k = 10^3, M = 10^6, etc.) + [default: False]. If any other non-zero number, will scale + `total` and `n`. + rate : float, optional + Manual override for iteration rate. + If [default: None], uses n/elapsed. + bar_format : str, optional + Specify a custom bar string formatting. May impact performance. + [default: '{l_bar}{bar}{r_bar}'], where + l_bar='{desc}: {percentage:3.0f}%|' and + r_bar='| {n_fmt}/{total_fmt} [{elapsed}<{remaining}, ' + '{rate_fmt}{postfix}]' + Possible vars: l_bar, bar, r_bar, n, n_fmt, total, total_fmt, + percentage, elapsed, elapsed_s, ncols, nrows, desc, unit, + rate, rate_fmt, rate_noinv, rate_noinv_fmt, + rate_inv, rate_inv_fmt, postfix, unit_divisor, + remaining, remaining_s, eta. + Note that a trailing ": " is automatically removed after {desc} + if the latter is empty. + postfix : *, optional + Similar to `prefix`, but placed at the end + (e.g. for additional stats). + Note: postfix is usually a string (not a dict) for this method, + and will if possible be set to postfix = ', ' + postfix. + However other types are supported (#382). + unit_divisor : float, optional + [default: 1000], ignored unless `unit_scale` is True. + initial : int or float, optional + The initial counter value [default: 0]. + colour : str, optional + Bar colour (e.g. 'green', '#00ff00'). + + Returns + ------- + out : Formatted meter and stats, ready to display. + """ + + # sanity check: total + if total and n >= (total + 0.5): # allow float imprecision (#849) + total = None + + # apply custom scale if necessary + if unit_scale and unit_scale not in (True, 1): + if total: + total *= unit_scale + n *= unit_scale + if rate: + rate *= unit_scale # by default rate = self.avg_dn / self.avg_dt + unit_scale = False + + elapsed_str = tqdm.format_interval(elapsed) + + # if unspecified, attempt to use rate = average speed + # (we allow manual override since predicting time is an arcane art) + if rate is None and elapsed: + rate = (n - initial) / elapsed + inv_rate = 1 / rate if rate else None + format_sizeof = tqdm.format_sizeof + rate_noinv_fmt = ((format_sizeof(rate) if unit_scale else f'{rate:5.2f}') + if rate else '?') + unit + '/s' + rate_inv_fmt = ( + (format_sizeof(inv_rate) if unit_scale else f'{inv_rate:5.2f}') + if inv_rate else '?') + 's/' + unit + rate_fmt = rate_inv_fmt if inv_rate and inv_rate > 1 else rate_noinv_fmt + + if unit_scale: + n_fmt = format_sizeof(n, divisor=unit_divisor) + total_fmt = format_sizeof(total, divisor=unit_divisor) if total is not None else '?' + else: + n_fmt = str(n) + total_fmt = str(total) if total is not None else '?' + + try: + postfix = ', ' + postfix if postfix else '' + except TypeError: + pass + + remaining = (total - n) / rate if rate and total else 0 + remaining_str = tqdm.format_interval(remaining) if rate else '?' 
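+
+        # ETA as an absolute datetime: project `remaining` seconds from "now";
+        # epoch 0 (UTC) serves as a sentinel when rate/total are unknown, and
+        # OverflowError clamps absurdly distant estimates to `datetime.max`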
+ try: + eta_dt = (datetime.now() + timedelta(seconds=remaining) + if rate and total else datetime.fromtimestamp(0, timezone.utc)) + except OverflowError: + eta_dt = datetime.max + + # format the stats displayed to the left and right sides of the bar + if prefix: + # old prefix setup work around + bool_prefix_colon_already = (prefix[-2:] == ": ") + l_bar = prefix if bool_prefix_colon_already else prefix + ": " + else: + l_bar = '' + + r_bar = f'| {n_fmt}/{total_fmt} [{elapsed_str}<{remaining_str}, {rate_fmt}{postfix}]' + + # Custom bar formatting + # Populate a dict with all available progress indicators + format_dict = { + # slight extension of self.format_dict + 'n': n, 'n_fmt': n_fmt, 'total': total, 'total_fmt': total_fmt, + 'elapsed': elapsed_str, 'elapsed_s': elapsed, + 'ncols': ncols, 'desc': prefix or '', 'unit': unit, + 'rate': inv_rate if inv_rate and inv_rate > 1 else rate, + 'rate_fmt': rate_fmt, 'rate_noinv': rate, + 'rate_noinv_fmt': rate_noinv_fmt, 'rate_inv': inv_rate, + 'rate_inv_fmt': rate_inv_fmt, + 'postfix': postfix, 'unit_divisor': unit_divisor, + 'colour': colour, + # plus more useful definitions + 'remaining': remaining_str, 'remaining_s': remaining, + 'l_bar': l_bar, 'r_bar': r_bar, 'eta': eta_dt, + **extra_kwargs} + + # total is known: we can predict some stats + if total: + # fractional and percentage progress + frac = n / total + percentage = frac * 100 + + l_bar += f'{percentage:3.0f}%|' + + if ncols == 0: + return l_bar[:-1] + r_bar[1:] + + format_dict.update(l_bar=l_bar) + if bar_format: + format_dict.update(percentage=percentage) + + # auto-remove colon for empty `{desc}` + if not prefix: + bar_format = bar_format.replace("{desc}: ", '') + else: + bar_format = "{l_bar}{bar}{r_bar}" + + full_bar = FormatReplace() + nobar = bar_format.format(bar=full_bar, **format_dict) + if not full_bar.format_called: + return nobar # no `{bar}`; nothing else to do + + # Formatting progress bar space available for bar's display + full_bar = Bar(frac, + max(1, ncols - disp_len(nobar)) if ncols else 10, + charset=Bar.ASCII if ascii is True else ascii or Bar.UTF, + colour=colour) + if not _is_ascii(full_bar.charset) and _is_ascii(bar_format): + bar_format = str(bar_format) + res = bar_format.format(bar=full_bar, **format_dict) + return disp_trim(res, ncols) if ncols else res + + elif bar_format: + # user-specified bar_format but no total + l_bar += '|' + format_dict.update(l_bar=l_bar, percentage=0) + full_bar = FormatReplace() + nobar = bar_format.format(bar=full_bar, **format_dict) + if not full_bar.format_called: + return nobar + full_bar = Bar(0, + max(1, ncols - disp_len(nobar)) if ncols else 10, + charset=Bar.BLANK, colour=colour) + res = bar_format.format(bar=full_bar, **format_dict) + return disp_trim(res, ncols) if ncols else res + else: + # no total: no progressbar, ETA, just progress stats + return (f'{(prefix + ": ") if prefix else ""}' + f'{n_fmt}{unit} [{elapsed_str}, {rate_fmt}{postfix}]') + + def __new__(cls, *_, **__): + instance = object.__new__(cls) + with cls.get_lock(): # also constructs lock if non-existent + cls._instances.add(instance) + # create monitoring thread + if cls.monitor_interval and (cls.monitor is None + or not cls.monitor.report()): + try: + cls.monitor = TMonitor(cls, cls.monitor_interval) + except Exception as e: # pragma: nocover + warn("tqdm:disabling monitor support" + " (monitor_interval = 0) due to:\n" + str(e), + TqdmMonitorWarning, stacklevel=2) + cls.monitor_interval = 0 + return instance + + @classmethod + def _get_free_pos(cls, 
instance=None): + """Skips specified instance.""" + positions = {abs(inst.pos) for inst in cls._instances + if inst is not instance and hasattr(inst, "pos")} + return min(set(range(len(positions) + 1)).difference(positions)) + + @classmethod + def _decr_instances(cls, instance): + """ + Remove from list and reposition another unfixed bar + to fill the new gap. + + This means that by default (where all nested bars are unfixed), + order is not maintained but screen flicker/blank space is minimised. + (tqdm<=4.44.1 moved ALL subsequent unfixed bars up.) + """ + with cls._lock: + try: + cls._instances.remove(instance) + except KeyError: + # if not instance.gui: # pragma: no cover + # raise + pass # py2: maybe magically removed already + # else: + if not instance.gui: + last = (instance.nrows or 20) - 1 + # find unfixed (`pos >= 0`) overflow (`pos >= nrows - 1`) + instances = list(filter( + lambda i: hasattr(i, "pos") and last <= i.pos, + cls._instances)) + # set first found to current `pos` + if instances: + inst = min(instances, key=lambda i: i.pos) + inst.clear(nolock=True) + inst.pos = abs(instance.pos) + + @classmethod + def write(cls, s, file=None, end="\n", nolock=False): + """Print a message via tqdm (without overlap with bars).""" + fp = file if file is not None else sys.stdout + with cls.external_write_mode(file=file, nolock=nolock): + # Write the message + fp.write(s) + fp.write(end) + + @classmethod + @contextmanager + def external_write_mode(cls, file=None, nolock=False): + """ + Disable tqdm within context and refresh tqdm when exits. + Useful when writing to standard output stream + """ + fp = file if file is not None else sys.stdout + + try: + if not nolock: + cls.get_lock().acquire() + # Clear all bars + inst_cleared = [] + for inst in getattr(cls, '_instances', []): + # Clear instance if in the target output file + # or if write output + tqdm output are both either + # sys.stdout or sys.stderr (because both are mixed in terminal) + if hasattr(inst, "start_t") and (inst.fp == fp or all( + f in (sys.stdout, sys.stderr) for f in (fp, inst.fp))): + inst.clear(nolock=True) + inst_cleared.append(inst) + yield + # Force refresh display of bars we cleared + for inst in inst_cleared: + inst.refresh(nolock=True) + finally: + if not nolock: + cls._lock.release() + + @classmethod + def set_lock(cls, lock): + """Set the global lock.""" + cls._lock = lock + + @classmethod + def get_lock(cls): + """Get the global lock. Construct it if it does not exist.""" + if not hasattr(cls, '_lock'): + cls._lock = TqdmDefaultWriteLock() + return cls._lock + + @classmethod + def pandas(cls, **tqdm_kwargs): + """ + Registers the current `tqdm` class with + pandas.core. + ( frame.DataFrame + | series.Series + | groupby.(generic.)DataFrameGroupBy + | groupby.(generic.)SeriesGroupBy + ).progress_apply + + A new instance will be created every time `progress_apply` is called, + and each instance will automatically `close()` upon completion. 
+ + Parameters + ---------- + tqdm_kwargs : arguments for the tqdm instance + + Examples + -------- + >>> import pandas as pd + >>> import numpy as np + >>> from tqdm import tqdm + >>> from tqdm.gui import tqdm as tqdm_gui + >>> + >>> df = pd.DataFrame(np.random.randint(0, 100, (100000, 6))) + >>> tqdm.pandas(ncols=50) # can use tqdm_gui, optional kwargs, etc + >>> # Now you can use `progress_apply` instead of `apply` + >>> df.groupby(0).progress_apply(lambda x: x**2) + + References + ---------- + + """ + from warnings import catch_warnings, simplefilter + + from pandas.core.frame import DataFrame + from pandas.core.series import Series + try: + with catch_warnings(): + simplefilter("ignore", category=FutureWarning) + from pandas import Panel + except ImportError: # pandas>=1.2.0 + Panel = None + Rolling, Expanding = None, None + try: # pandas>=1.0.0 + from pandas.core.window.rolling import _Rolling_and_Expanding + except ImportError: + try: # pandas>=0.18.0 + from pandas.core.window import _Rolling_and_Expanding + except ImportError: # pandas>=1.2.0 + try: # pandas>=1.2.0 + from pandas.core.window.expanding import Expanding + from pandas.core.window.rolling import Rolling + _Rolling_and_Expanding = Rolling, Expanding + except ImportError: # pragma: no cover + _Rolling_and_Expanding = None + try: # pandas>=0.25.0 + from pandas.core.groupby.generic import SeriesGroupBy # , NDFrameGroupBy + from pandas.core.groupby.generic import DataFrameGroupBy + except ImportError: # pragma: no cover + try: # pandas>=0.23.0 + from pandas.core.groupby.groupby import DataFrameGroupBy, SeriesGroupBy + except ImportError: + from pandas.core.groupby import DataFrameGroupBy, SeriesGroupBy + try: # pandas>=0.23.0 + from pandas.core.groupby.groupby import GroupBy + except ImportError: # pragma: no cover + from pandas.core.groupby import GroupBy + + try: # pandas>=0.23.0 + from pandas.core.groupby.groupby import PanelGroupBy + except ImportError: + try: + from pandas.core.groupby import PanelGroupBy + except ImportError: # pandas>=0.25.0 + PanelGroupBy = None + + tqdm_kwargs = tqdm_kwargs.copy() + deprecated_t = [tqdm_kwargs.pop('deprecated_t', None)] + + def inner_generator(df_function='apply'): + def inner(df, func, *args, **kwargs): + """ + Parameters + ---------- + df : (DataFrame|Series)[GroupBy] + Data (may be grouped). + func : function + To be applied on the (grouped) data. + **kwargs : optional + Transmitted to `df.apply()`. + """ + + # Precompute total iterations + total = tqdm_kwargs.pop("total", getattr(df, 'ngroups', None)) + if total is None: # not grouped + if df_function == 'applymap': + total = df.size + elif isinstance(df, Series): + total = len(df) + elif (_Rolling_and_Expanding is None or + not isinstance(df, _Rolling_and_Expanding)): + # DataFrame or Panel + axis = kwargs.get('axis', 0) + if axis == 'index': + axis = 0 + elif axis == 'columns': + axis = 1 + # when axis=0, total is shape[axis1] + total = df.size // df.shape[axis] + + # Init bar + if deprecated_t[0] is not None: + t = deprecated_t[0] + deprecated_t[0] = None + else: + t = cls(total=total, **tqdm_kwargs) + + if len(args) > 0: + # *args intentionally not supported (see #244, #299) + TqdmDeprecationWarning( + "Except func, normal arguments are intentionally" + + " not supported by" + + " `(DataFrame|Series|GroupBy).progress_apply`." 
+ + " Use keyword arguments instead.", + fp_write=getattr(t.fp, 'write', sys.stderr.write)) + + try: # pandas>=1.3.0 + from pandas.core.common import is_builtin_func + except ImportError: + is_builtin_func = df._is_builtin_func + try: + func = is_builtin_func(func) + except TypeError: + pass + + # Define bar updating wrapper + def wrapper(*args, **kwargs): + # update tbar correctly + # it seems `pandas apply` calls `func` twice + # on the first column/row to decide whether it can + # take a fast or slow code path; so stop when t.total==t.n + t.update(n=1 if not t.total or t.n < t.total else 0) + return func(*args, **kwargs) + + # Apply the provided function (in **kwargs) + # on the df using our wrapper (which provides bar updating) + try: + return getattr(df, df_function)(wrapper, **kwargs) + finally: + t.close() + + return inner + + # Monkeypatch pandas to provide easy methods + # Enable custom tqdm progress in pandas! + Series.progress_apply = inner_generator() + SeriesGroupBy.progress_apply = inner_generator() + Series.progress_map = inner_generator('map') + SeriesGroupBy.progress_map = inner_generator('map') + + DataFrame.progress_apply = inner_generator() + DataFrameGroupBy.progress_apply = inner_generator() + DataFrame.progress_applymap = inner_generator('applymap') + DataFrame.progress_map = inner_generator('map') + DataFrameGroupBy.progress_map = inner_generator('map') + + if Panel is not None: + Panel.progress_apply = inner_generator() + if PanelGroupBy is not None: + PanelGroupBy.progress_apply = inner_generator() + + GroupBy.progress_apply = inner_generator() + GroupBy.progress_aggregate = inner_generator('aggregate') + GroupBy.progress_transform = inner_generator('transform') + + if Rolling is not None and Expanding is not None: + Rolling.progress_apply = inner_generator() + Expanding.progress_apply = inner_generator() + elif _Rolling_and_Expanding is not None: + _Rolling_and_Expanding.progress_apply = inner_generator() + + # override defaults via env vars + @envwrap("TQDM_", is_method=True, types={'total': float, 'ncols': int, 'miniters': float, + 'position': int, 'nrows': int}) + def __init__(self, iterable=None, desc=None, total=None, leave=True, file=None, + ncols=None, mininterval=0.1, maxinterval=10.0, miniters=None, + ascii=None, disable=False, unit='it', unit_scale=False, + dynamic_ncols=False, smoothing=0.3, bar_format=None, initial=0, + position=None, postfix=None, unit_divisor=1000, write_bytes=False, + lock_args=None, nrows=None, colour=None, delay=0.0, gui=False, + **kwargs): + """see tqdm.tqdm for arguments""" + if file is None: + file = sys.stderr + + if write_bytes: + # Despite coercing unicode into bytes, py2 sys.std* streams + # should have bytes written to them. 
+ file = SimpleTextIOWrapper( + file, encoding=getattr(file, 'encoding', None) or 'utf-8') + + file = DisableOnWriteError(file, tqdm_instance=self) + + if disable is None and hasattr(file, "isatty") and not file.isatty(): + disable = True + + if total is None and iterable is not None: + try: + total = len(iterable) + except (TypeError, AttributeError): + total = None + if total == float("inf"): + # Infinite iterations, behave same as unknown + total = None + + if disable: + self.iterable = iterable + self.disable = disable + with self._lock: + self.pos = self._get_free_pos(self) + self._instances.remove(self) + self.n = initial + self.total = total + self.leave = leave + return + + if kwargs: + self.disable = True + with self._lock: + self.pos = self._get_free_pos(self) + self._instances.remove(self) + raise ( + TqdmDeprecationWarning( + "`nested` is deprecated and automated.\n" + "Use `position` instead for manual control.\n", + fp_write=getattr(file, 'write', sys.stderr.write)) + if "nested" in kwargs else + TqdmKeyError("Unknown argument(s): " + str(kwargs))) + + # Preprocess the arguments + if ( + (ncols is None or nrows is None) and (file in (sys.stderr, sys.stdout)) + ) or dynamic_ncols: # pragma: no cover + if dynamic_ncols: + dynamic_ncols = _screen_shape_wrapper() + if dynamic_ncols: + ncols, nrows = dynamic_ncols(file) + else: + _dynamic_ncols = _screen_shape_wrapper() + if _dynamic_ncols: + _ncols, _nrows = _dynamic_ncols(file) + if ncols is None: + ncols = _ncols + if nrows is None: + nrows = _nrows + + if miniters is None: + miniters = 0 + dynamic_miniters = True + else: + dynamic_miniters = False + + if mininterval is None: + mininterval = 0 + + if maxinterval is None: + maxinterval = 0 + + if ascii is None: + ascii = not _supports_unicode(file) + + if bar_format and ascii is not True and not _is_ascii(ascii): + # Convert bar format into unicode since terminal uses unicode + bar_format = str(bar_format) + + if smoothing is None: + smoothing = 0 + + # Store the arguments + self.iterable = iterable + self.desc = desc or '' + self.total = total + self.leave = leave + self.fp = file + self.ncols = ncols + self.nrows = nrows + self.mininterval = mininterval + self.maxinterval = maxinterval + self.miniters = miniters + self.dynamic_miniters = dynamic_miniters + self.ascii = ascii + self.disable = disable + self.unit = unit + self.unit_scale = unit_scale + self.unit_divisor = unit_divisor + self.initial = initial + self.lock_args = lock_args + self.delay = delay + self.gui = gui + self.dynamic_ncols = dynamic_ncols + self.smoothing = smoothing + self._ema_dn = EMA(smoothing) + self._ema_dt = EMA(smoothing) + self._ema_miniters = EMA(smoothing) + self.bar_format = bar_format + self.postfix = None + self.colour = colour + self._time = time + if postfix: + try: + self.set_postfix(refresh=False, **postfix) + except TypeError: + self.postfix = postfix + + # Init the iterations counters + self.last_print_n = initial + self.n = initial + + # if nested, at initial sp() call we replace '\r' by '\n' to + # not overwrite the outer progress bar + with self._lock: + # mark fixed positions as negative + self.pos = self._get_free_pos(self) if position is None else -position + + if not gui: + # Initialize the screen printer + self.sp = self.status_printer(self.fp) + if delay <= 0: + self.refresh(lock_args=self.lock_args) + + # Init the time counter + self.last_print_t = self._time() + # NB: Avoid race conditions by setting start_t at the very end of init + self.start_t = self.last_print_t + + def 
__bool__(self): + if self.total is not None: + return self.total > 0 + if self.iterable is None: + raise TypeError('bool() undefined when iterable == total == None') + return bool(self.iterable) + + def __len__(self): + return ( + self.total if self.iterable is None + else self.iterable.shape[0] if hasattr(self.iterable, "shape") + else len(self.iterable) if hasattr(self.iterable, "__len__") + else self.iterable.__length_hint__() if hasattr(self.iterable, "__length_hint__") + else getattr(self, "total", None)) + + def __reversed__(self): + try: + orig = self.iterable + except AttributeError: + raise TypeError("'tqdm' object is not reversible") + else: + self.iterable = reversed(self.iterable) + return self.__iter__() + finally: + self.iterable = orig + + def __contains__(self, item): + contains = getattr(self.iterable, '__contains__', None) + return contains(item) if contains is not None else item in self.__iter__() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + try: + self.close() + except AttributeError: + # maybe eager thread cleanup upon external error + if (exc_type, exc_value, traceback) == (None, None, None): + raise + warn("AttributeError ignored", TqdmWarning, stacklevel=2) + + def __del__(self): + self.close() + + def __str__(self): + return self.format_meter(**self.format_dict) + + @property + def _comparable(self): + return abs(getattr(self, "pos", 1 << 31)) + + def __hash__(self): + return id(self) + + def __iter__(self): + """Backward-compatibility to use: for x in tqdm(iterable)""" + + # Inlining instance variables as locals (speed optimisation) + iterable = self.iterable + + # If the bar is disabled, then just walk the iterable + # (note: keep this check outside the loop for performance) + if self.disable: + for obj in iterable: + yield obj + return + + mininterval = self.mininterval + last_print_t = self.last_print_t + last_print_n = self.last_print_n + min_start_t = self.start_t + self.delay + n = self.n + time = self._time + + try: + for obj in iterable: + yield obj + # Update and possibly print the progressbar. + # Note: does not call self.update(1) for speed optimisation. + n += 1 + + if n - last_print_n >= self.miniters: + cur_t = time() + dt = cur_t - last_print_t + if dt >= mininterval and cur_t >= min_start_t: + self.update(n - last_print_n) + last_print_n = self.last_print_n + last_print_t = self.last_print_t + finally: + self.n = n + self.close() + + def update(self, n=1): + """ + Manually update the progress bar, useful for streams + such as reading files. + E.g.: + >>> t = tqdm(total=filesize) # Initialise + >>> for current_buffer in stream: + ... ... + ... t.update(len(current_buffer)) + >>> t.close() + The last line is highly recommended, but possibly not necessary if + `t.update()` will be called in such a way that `filesize` will be + exactly reached and printed. + + Parameters + ---------- + n : int or float, optional + Increment to add to the internal counter of iterations + [default: 1]. If using float, consider specifying `{n:.3f}` + or similar in `bar_format`, or specifying `unit_scale`. + + Returns + ------- + out : bool or None + True if a `display()` was triggered. 
+ """ + if self.disable: + return + + if n < 0: + self.last_print_n += n # for auto-refresh logic to work + self.n += n + + # check counter first to reduce calls to time() + if self.n - self.last_print_n >= self.miniters: + cur_t = self._time() + dt = cur_t - self.last_print_t + if dt >= self.mininterval and cur_t >= self.start_t + self.delay: + cur_t = self._time() + dn = self.n - self.last_print_n # >= n + if self.smoothing and dt and dn: + # EMA (not just overall average) + self._ema_dn(dn) + self._ema_dt(dt) + self.refresh(lock_args=self.lock_args) + if self.dynamic_miniters: + # If no `miniters` was specified, adjust automatically to the + # maximum iteration rate seen so far between two prints. + # e.g.: After running `tqdm.update(5)`, subsequent + # calls to `tqdm.update()` will only cause an update after + # at least 5 more iterations. + if self.maxinterval and dt >= self.maxinterval: + self.miniters = dn * (self.mininterval or self.maxinterval) / dt + elif self.smoothing: + # EMA miniters update + self.miniters = self._ema_miniters( + dn * (self.mininterval / dt if self.mininterval and dt + else 1)) + else: + # max iters between two prints + self.miniters = max(self.miniters, dn) + + # Store old values for next call + self.last_print_n = self.n + self.last_print_t = cur_t + return True + + def close(self): + """Cleanup and (if leave=False) close the progressbar.""" + if self.disable: + return + + # Prevent multiple closures + self.disable = True + + # decrement instance pos and remove from internal set + pos = abs(self.pos) + self._decr_instances(self) + + if self.last_print_t < self.start_t + self.delay: + # haven't ever displayed; nothing to clear + return + + # GUI mode + if getattr(self, 'sp', None) is None: + return + + # annoyingly, _supports_unicode isn't good enough + def fp_write(s): + self.fp.write(str(s)) + + try: + fp_write('') + except ValueError as e: + if 'closed' in str(e): + return + raise # pragma: no cover + + leave = pos == 0 if self.leave is None else self.leave + + with self._lock: + if leave: + # stats for overall rate (no weighted average) + self._ema_dt = lambda: None + self.display(pos=0) + fp_write('\n') + else: + # clear previous display + if self.display(msg='', pos=pos) and not pos: + fp_write('\r') + + def clear(self, nolock=False): + """Clear current bar display.""" + if self.disable: + return + + if not nolock: + self._lock.acquire() + pos = abs(self.pos) + if pos < (self.nrows or 20): + self.moveto(pos) + self.sp('') + self.fp.write('\r') # place cursor back at the beginning of line + self.moveto(-pos) + if not nolock: + self._lock.release() + + def refresh(self, nolock=False, lock_args=None): + """ + Force refresh the display of this bar. + + Parameters + ---------- + nolock : bool, optional + If `True`, does not lock. + If [default: `False`]: calls `acquire()` on internal lock. + lock_args : tuple, optional + Passed to internal lock's `acquire()`. + If specified, will only `display()` if `acquire()` returns `True`. + """ + if self.disable: + return + + if not nolock: + if lock_args: + if not self._lock.acquire(*lock_args): + return False + else: + self._lock.acquire() + self.display() + if not nolock: + self._lock.release() + return True + + def unpause(self): + """Restart tqdm timer from last print time.""" + if self.disable: + return + cur_t = self._time() + self.start_t += cur_t - self.last_print_t + self.last_print_t = cur_t + + def reset(self, total=None): + """ + Resets to 0 iterations for repeated use. 
+ + Consider combining with `leave=True`. + + Parameters + ---------- + total : int or float, optional. Total to use for the new bar. + """ + self.n = 0 + if total is not None: + self.total = total + if self.disable: + return + self.last_print_n = 0 + self.last_print_t = self.start_t = self._time() + self._ema_dn = EMA(self.smoothing) + self._ema_dt = EMA(self.smoothing) + self._ema_miniters = EMA(self.smoothing) + self.refresh() + + def set_description(self, desc=None, refresh=True): + """ + Set/modify description of the progress bar. + + Parameters + ---------- + desc : str, optional + refresh : bool, optional + Forces refresh [default: True]. + """ + self.desc = desc + ': ' if desc else '' + if refresh: + self.refresh() + + def set_description_str(self, desc=None, refresh=True): + """Set/modify description without ': ' appended.""" + self.desc = desc or '' + if refresh: + self.refresh() + + def set_postfix(self, ordered_dict=None, refresh=True, **kwargs): + """ + Set/modify postfix (additional stats) + with automatic formatting based on datatype. + + Parameters + ---------- + ordered_dict : dict or OrderedDict, optional + refresh : bool, optional + Forces refresh [default: True]. + kwargs : dict, optional + """ + # Sort in alphabetical order to be more deterministic + postfix = OrderedDict([] if ordered_dict is None else ordered_dict) + for key in sorted(kwargs.keys()): + postfix[key] = kwargs[key] + # Preprocess stats according to datatype + for key in postfix.keys(): + # Number: limit the length of the string + if isinstance(postfix[key], Number): + postfix[key] = self.format_num(postfix[key]) + # Else for any other type, try to get the string conversion + elif not isinstance(postfix[key], str): + postfix[key] = str(postfix[key]) + # Else if it's a string, don't need to preprocess anything + # Stitch together to get the final postfix + self.postfix = ', '.join(key + '=' + postfix[key].strip() + for key in postfix.keys()) + if refresh: + self.refresh() + + def set_postfix_str(self, s='', refresh=True): + """ + Postfix without dictionary expansion, similar to prefix handling. + """ + self.postfix = str(s) + if refresh: + self.refresh() + + def moveto(self, n): + # TODO: private method + self.fp.write('\n' * n + _term_move_up() * -n) + getattr(self.fp, 'flush', lambda: None)() + + @property + def format_dict(self): + """Public API for read-only member access.""" + if self.disable and not hasattr(self, 'unit'): + return defaultdict(lambda: None, { + 'n': self.n, 'total': self.total, 'elapsed': 0, 'unit': 'it'}) + if self.dynamic_ncols: + self.ncols, self.nrows = self.dynamic_ncols(self.fp) + return { + 'n': self.n, 'total': self.total, + 'elapsed': self._time() - self.start_t if hasattr(self, 'start_t') else 0, + 'ncols': self.ncols, 'nrows': self.nrows, 'prefix': self.desc, + 'ascii': self.ascii, 'unit': self.unit, 'unit_scale': self.unit_scale, + 'rate': self._ema_dn() / self._ema_dt() if self._ema_dt() else None, + 'bar_format': self.bar_format, 'postfix': self.postfix, + 'unit_divisor': self.unit_divisor, 'initial': self.initial, + 'colour': self.colour} + + def display(self, msg=None, pos=None): + """ + Use `self.sp` to display `msg` in the specified `pos`. + + Consider overloading this function when inheriting to use e.g.: + `self.some_frontend(**self.format_dict)` instead of `self.sp`. + + Parameters + ---------- + msg : str, optional. What to display (default: `repr(self)`). + pos : int, optional. Position to `moveto` + (default: `abs(self.pos)`). 
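A short sketch of how the `set_description()`/`set_postfix()` calls defined earlier in this hunk are typically driven; the loop and the `loss` metric are invented for illustration:

```python
from tqdm import trange

# postfix values are auto-formatted: numbers via format_num(), others via str()
with trange(100, desc="training") as bar:
    for epoch in bar:
        loss = 1.0 / (epoch + 1)  # dummy metric
        bar.set_postfix(loss=loss, refresh=False)  # folded into the next redraw
```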
+ """ + if pos is None: + pos = abs(self.pos) + + nrows = self.nrows or 20 + if pos >= nrows - 1: + if pos >= nrows: + return False + if msg or msg is None: # override at `nrows - 1` + msg = " ... (more hidden) ..." + + if not hasattr(self, "sp"): + raise TqdmDeprecationWarning( + "Please use `tqdm.gui.tqdm(...)`" + " instead of `tqdm(..., gui=True)`\n", + fp_write=getattr(self.fp, 'write', sys.stderr.write)) + + if pos: + self.moveto(pos) + self.sp(self.__str__() if msg is None else msg) + if pos: + self.moveto(-pos) + return True + + @classmethod + @contextmanager + def wrapattr(cls, stream, method, total=None, bytes=True, **tqdm_kwargs): + """ + stream : file-like object. + method : str, "read" or "write". The result of `read()` and + the first argument of `write()` should have a `len()`. + + >>> with tqdm.wrapattr(file_obj, "read", total=file_obj.size) as fobj: + ... while True: + ... chunk = fobj.read(chunk_size) + ... if not chunk: + ... break + """ + with cls(total=total, **tqdm_kwargs) as t: + if bytes: + t.unit = "B" + t.unit_scale = True + t.unit_divisor = 1024 + yield CallbackIOWrapper(t.update, stream, method) + + +def trange(*args, **kwargs): + """Shortcut for tqdm(range(*args), **kwargs).""" + return tqdm(range(*args), **kwargs) diff --git a/venv/lib/python3.12/site-packages/tqdm/tk.py b/venv/lib/python3.12/site-packages/tqdm/tk.py new file mode 100644 index 00000000..788303c8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/tk.py @@ -0,0 +1,196 @@ +""" +Tkinter GUI progressbar decorator for iterators. + +Usage: +>>> from tqdm.tk import trange, tqdm +>>> for i in trange(10): +... ... +""" +import re +import sys +import tkinter +import tkinter.ttk as ttk +from warnings import warn + +from .std import TqdmExperimentalWarning, TqdmWarning +from .std import tqdm as std_tqdm + +__author__ = {"github.com/": ["richardsheridan", "casperdcl"]} +__all__ = ['tqdm_tk', 'ttkrange', 'tqdm', 'trange'] + + +class tqdm_tk(std_tqdm): # pragma: no cover + """ + Experimental Tkinter GUI version of tqdm! + + Note: Window interactivity suffers if `tqdm_tk` is not running within + a Tkinter mainloop and values are generated infrequently. In this case, + consider calling `tqdm_tk.refresh()` frequently in the Tk thread. + """ + + # TODO: @classmethod: write()? + + def __init__(self, *args, **kwargs): + """ + This class accepts the following parameters *in addition* to + the parameters accepted by `tqdm`. + + Parameters + ---------- + grab : bool, optional + Grab the input across all windows of the process. + tk_parent : `tkinter.Wm`, optional + Parent Tk window. + cancel_callback : Callable, optional + Create a cancel button and set `cancel_callback` to be called + when the cancel or window close button is clicked. 
+ """ + kwargs = kwargs.copy() + kwargs['gui'] = True + # convert disable = None to False + kwargs['disable'] = bool(kwargs.get('disable', False)) + self._warn_leave = 'leave' in kwargs + grab = kwargs.pop('grab', False) + tk_parent = kwargs.pop('tk_parent', None) + self._cancel_callback = kwargs.pop('cancel_callback', None) + super().__init__(*args, **kwargs) + + if self.disable: + return + + if tk_parent is None: # Discover parent widget + try: + tk_parent = tkinter._default_root + except AttributeError: + raise AttributeError( + "`tk_parent` required when using `tkinter.NoDefaultRoot()`") + if tk_parent is None: # use new default root window as display + self._tk_window = tkinter.Tk() + else: # some other windows already exist + self._tk_window = tkinter.Toplevel() + else: + self._tk_window = tkinter.Toplevel(tk_parent) + + warn("GUI is experimental/alpha", TqdmExperimentalWarning, stacklevel=2) + self._tk_dispatching = self._tk_dispatching_helper() + + self._tk_window.protocol("WM_DELETE_WINDOW", self.cancel) + self._tk_window.wm_title(self.desc) + self._tk_window.wm_attributes("-topmost", 1) + self._tk_window.after(0, lambda: self._tk_window.wm_attributes("-topmost", 0)) + self._tk_n_var = tkinter.DoubleVar(self._tk_window, value=0) + self._tk_text_var = tkinter.StringVar(self._tk_window) + pbar_frame = ttk.Frame(self._tk_window, padding=5) + pbar_frame.pack() + _tk_label = ttk.Label(pbar_frame, textvariable=self._tk_text_var, + wraplength=600, anchor="center", justify="center") + _tk_label.pack() + self._tk_pbar = ttk.Progressbar( + pbar_frame, variable=self._tk_n_var, length=450) + if self.total is not None: + self._tk_pbar.configure(maximum=self.total) + else: + self._tk_pbar.configure(mode="indeterminate") + self._tk_pbar.pack() + if self._cancel_callback is not None: + _tk_button = ttk.Button(pbar_frame, text="Cancel", command=self.cancel) + _tk_button.pack() + if grab: + self._tk_window.grab_set() + + def close(self): + if self.disable: + return + + self.disable = True + + with self.get_lock(): + self._instances.remove(self) + + def _close(): + self._tk_window.after('idle', self._tk_window.destroy) + if not self._tk_dispatching: + self._tk_window.update() + + self._tk_window.protocol("WM_DELETE_WINDOW", _close) + + # if leave is set but we are self-dispatching, the left window is + # totally unresponsive unless the user manually dispatches + if not self.leave: + _close() + elif not self._tk_dispatching: + if self._warn_leave: + warn("leave flag ignored if not in tkinter mainloop", + TqdmWarning, stacklevel=2) + _close() + + def clear(self, *_, **__): + pass + + def display(self, *_, **__): + self._tk_n_var.set(self.n) + d = self.format_dict + # remove {bar} + d['bar_format'] = (d['bar_format'] or "{l_bar}{r_bar}").replace( + "{bar}", "") + msg = self.format_meter(**d) + if '' in msg: + msg = "".join(re.split(r'\|?\|?', msg, maxsplit=1)) + self._tk_text_var.set(msg) + if not self._tk_dispatching: + self._tk_window.update() + + def set_description(self, desc=None, refresh=True): + self.set_description_str(desc, refresh) + + def set_description_str(self, desc=None, refresh=True): + self.desc = desc + if not self.disable: + self._tk_window.wm_title(desc) + if refresh and not self._tk_dispatching: + self._tk_window.update() + + def cancel(self): + """ + `cancel_callback()` followed by `close()` + when close/cancel buttons clicked. 
+ """ + if self._cancel_callback is not None: + self._cancel_callback() + self.close() + + def reset(self, total=None): + """ + Resets to 0 iterations for repeated use. + + Parameters + ---------- + total : int or float, optional. Total to use for the new bar. + """ + if hasattr(self, '_tk_pbar'): + if total is None: + self._tk_pbar.configure(maximum=100, mode="indeterminate") + else: + self._tk_pbar.configure(maximum=total, mode="determinate") + super().reset(total=total) + + @staticmethod + def _tk_dispatching_helper(): + """determine if Tkinter mainloop is dispatching events""" + codes = {tkinter.mainloop.__code__, tkinter.Misc.mainloop.__code__} + for frame in sys._current_frames().values(): + while frame: + if frame.f_code in codes: + return True + frame = frame.f_back + return False + + +def ttkrange(*args, **kwargs): + """Shortcut for `tqdm.tk.tqdm(range(*args), **kwargs)`.""" + return tqdm_tk(range(*args), **kwargs) + + +# Aliases +tqdm = tqdm_tk +trange = ttkrange diff --git a/venv/lib/python3.12/site-packages/tqdm/tqdm.1 b/venv/lib/python3.12/site-packages/tqdm/tqdm.1 new file mode 100644 index 00000000..b90ab4b9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/tqdm.1 @@ -0,0 +1,314 @@ +.\" Automatically generated by Pandoc 1.19.2 +.\" +.TH "TQDM" "1" "2015\-2021" "tqdm User Manuals" "" +.hy +.SH NAME +.PP +tqdm \- fast, extensible progress bar for Python and CLI +.SH SYNOPSIS +.PP +tqdm [\f[I]options\f[]] +.SH DESCRIPTION +.PP +See . +Can be used as a pipe: +.IP +.nf +\f[C] +$\ #\ count\ lines\ of\ code +$\ cat\ *.py\ |\ tqdm\ |\ wc\ \-l +327it\ [00:00,\ 981773.38it/s] +327 + +$\ #\ find\ all\ files +$\ find\ .\ \-name\ "*.py"\ |\ tqdm\ |\ wc\ \-l +432it\ [00:00,\ 833842.30it/s] +432 + +#\ ...\ and\ more\ info +$\ find\ .\ \-name\ \[aq]*.py\[aq]\ \-exec\ wc\ \-l\ \\{}\ \\;\ \\ +\ \ |\ tqdm\ \-\-total\ 432\ \-\-unit\ files\ \-\-desc\ counting\ \\ +\ \ |\ awk\ \[aq]{\ sum\ +=\ $1\ };\ END\ {\ print\ sum\ }\[aq] +counting:\ 100%|█████████|\ 432/432\ [00:00<00:00,\ 794361.83files/s] +131998 +\f[] +.fi +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this help and exit. +.RS +.RE +.TP +.B \-v, \-\-version +Print version and exit. +.RS +.RE +.TP +.B \-\-desc=\f[I]desc\f[] +str, optional. +Prefix for the progressbar. +.RS +.RE +.TP +.B \-\-total=\f[I]total\f[] +int or float, optional. +The number of expected iterations. +If unspecified, len(iterable) is used if possible. +If float("inf") or as a last resort, only basic progress statistics are +displayed (no ETA, no progressbar). +If \f[C]gui\f[] is True and this parameter needs subsequent updating, +specify an initial arbitrary large positive number, e.g. +9e9. +.RS +.RE +.TP +.B \-\-leave +bool, optional. +If [default: True], keeps all traces of the progressbar upon termination +of iteration. +If \f[C]None\f[], will leave only if \f[C]position\f[] is \f[C]0\f[]. +.RS +.RE +.TP +.B \-\-ncols=\f[I]ncols\f[] +int, optional. +The width of the entire output message. +If specified, dynamically resizes the progressbar to stay within this +bound. +If unspecified, attempts to use environment width. +The fallback is a meter width of 10 and no limit for the counter and +statistics. +If 0, will not print any meter (only stats). +.RS +.RE +.TP +.B \-\-mininterval=\f[I]mininterval\f[] +float, optional. +Minimum progress display update interval [default: 0.1] seconds. +.RS +.RE +.TP +.B \-\-maxinterval=\f[I]maxinterval\f[] +float, optional. +Maximum progress display update interval [default: 10] seconds. 
+Automatically adjusts \f[C]miniters\f[] to correspond to +\f[C]mininterval\f[] after long display update lag. +Only works if \f[C]dynamic_miniters\f[] or monitor thread is enabled. +.RS +.RE +.TP +.B \-\-miniters=\f[I]miniters\f[] +int or float, optional. +Minimum progress display update interval, in iterations. +If 0 and \f[C]dynamic_miniters\f[], will automatically adjust to equal +\f[C]mininterval\f[] (more CPU efficient, good for tight loops). +If > 0, will skip display of specified number of iterations. +Tweak this and \f[C]mininterval\f[] to get very efficient loops. +If your progress is erratic with both fast and slow iterations (network, +skipping items, etc) you should set miniters=1. +.RS +.RE +.TP +.B \-\-ascii=\f[I]ascii\f[] +bool or str, optional. +If unspecified or False, use unicode (smooth blocks) to fill the meter. +The fallback is to use ASCII characters " 123456789#". +.RS +.RE +.TP +.B \-\-disable +bool, optional. +Whether to disable the entire progressbar wrapper [default: False]. +If set to None, disable on non\-TTY. +.RS +.RE +.TP +.B \-\-unit=\f[I]unit\f[] +str, optional. +String that will be used to define the unit of each iteration [default: +it]. +.RS +.RE +.TP +.B \-\-unit\-scale=\f[I]unit_scale\f[] +bool or int or float, optional. +If 1 or True, the number of iterations will be reduced/scaled +automatically and a metric prefix following the International System of +Units standard will be added (kilo, mega, etc.) [default: False]. +If any other non\-zero number, will scale \f[C]total\f[] and \f[C]n\f[]. +.RS +.RE +.TP +.B \-\-dynamic\-ncols +bool, optional. +If set, constantly alters \f[C]ncols\f[] and \f[C]nrows\f[] to the +environment (allowing for window resizes) [default: False]. +.RS +.RE +.TP +.B \-\-smoothing=\f[I]smoothing\f[] +float, optional. +Exponential moving average smoothing factor for speed estimates (ignored +in GUI mode). +Ranges from 0 (average speed) to 1 (current/instantaneous speed) +[default: 0.3]. +.RS +.RE +.TP +.B \-\-bar\-format=\f[I]bar_format\f[] +str, optional. +Specify a custom bar string formatting. +May impact performance. +[default: \[aq]{l_bar}{bar}{r_bar}\[aq]], where l_bar=\[aq]{desc}: +{percentage:3.0f}%|\[aq] and r_bar=\[aq]| {n_fmt}/{total_fmt} +[{elapsed}<{remaining}, \[aq] \[aq]{rate_fmt}{postfix}]\[aq] Possible +vars: l_bar, bar, r_bar, n, n_fmt, total, total_fmt, percentage, +elapsed, elapsed_s, ncols, nrows, desc, unit, rate, rate_fmt, +rate_noinv, rate_noinv_fmt, rate_inv, rate_inv_fmt, postfix, +unit_divisor, remaining, remaining_s, eta. +Note that a trailing ": " is automatically removed after {desc} if the +latter is empty. +.RS +.RE +.TP +.B \-\-initial=\f[I]initial\f[] +int or float, optional. +The initial counter value. +Useful when restarting a progress bar [default: 0]. +If using float, consider specifying \f[C]{n:.3f}\f[] or similar in +\f[C]bar_format\f[], or specifying \f[C]unit_scale\f[]. +.RS +.RE +.TP +.B \-\-position=\f[I]position\f[] +int, optional. +Specify the line offset to print this bar (starting from 0) Automatic if +unspecified. +Useful to manage multiple bars at once (eg, from threads). +.RS +.RE +.TP +.B \-\-postfix=\f[I]postfix\f[] +dict or *, optional. +Specify additional stats to display at the end of the bar. +Calls \f[C]set_postfix(**postfix)\f[] if possible (dict). +.RS +.RE +.TP +.B \-\-unit\-divisor=\f[I]unit_divisor\f[] +float, optional. +[default: 1000], ignored unless \f[C]unit_scale\f[] is True. +.RS +.RE +.TP +.B \-\-write\-bytes +bool, optional. +Whether to write bytes. 
+If (default: False) will write unicode.
+.RS
+.RE
+.TP
+.B \-\-lock\-args=\f[I]lock_args\f[]
+tuple, optional.
+Passed to \f[C]refresh\f[] for intermediate output (initialisation,
+iterating, and updating).
+.RS
+.RE
+.TP
+.B \-\-nrows=\f[I]nrows\f[]
+int, optional.
+The screen height.
+If specified, hides nested bars outside this bound.
+If unspecified, attempts to use environment height.
+The fallback is 20.
+.RS
+.RE
+.TP
+.B \-\-colour=\f[I]colour\f[]
+str, optional.
+Bar colour (e.g.
+\[aq]green\[aq], \[aq]#00ff00\[aq]).
+.RS
+.RE
+.TP
+.B \-\-delay=\f[I]delay\f[]
+float, optional.
+Don\[aq]t display until [default: 0] seconds have elapsed.
+.RS
+.RE
+.TP
+.B \-\-delim=\f[I]delim\f[]
+chr, optional.
+Delimiting character [default: \[aq]\\n\[aq]].
+Use \[aq]\\0\[aq] for null.
+N.B.: on Windows systems, Python converts \[aq]\\n\[aq] to
+\[aq]\\r\\n\[aq].
+.RS
+.RE
+.TP
+.B \-\-buf\-size=\f[I]buf_size\f[]
+int, optional.
+String buffer size in bytes [default: 256] used when \f[C]delim\f[] is
+specified.
+.RS
+.RE
+.TP
+.B \-\-bytes
+bool, optional.
+If true, will count bytes, ignore \f[C]delim\f[], and default
+\f[C]unit_scale\f[] to True, \f[C]unit_divisor\f[] to 1024, and
+\f[C]unit\f[] to \[aq]B\[aq].
+.RS
+.RE
+.TP
+.B \-\-tee
+bool, optional.
+If true, passes \f[C]stdin\f[] to both \f[C]stderr\f[] and
+\f[C]stdout\f[].
+.RS
+.RE
+.TP
+.B \-\-update
+bool, optional.
+If true, will treat input as newly elapsed iterations, i.e.
+numbers to pass to \f[C]update()\f[].
+Note that this is slow (~2e5 it/s) since every input must be decoded as
+a number.
+.RS
+.RE
+.TP
+.B \-\-update\-to
+bool, optional.
+If true, will treat input as total elapsed iterations, i.e.
+numbers to assign to \f[C]self.n\f[].
+Note that this is slow (~2e5 it/s) since every input must be decoded as
+a number.
+.RS
+.RE
+.TP
+.B \-\-null
+bool, optional.
+If true, will discard input (no stdout).
+.RS
+.RE
+.TP
+.B \-\-manpath=\f[I]manpath\f[]
+str, optional.
+Directory in which to install tqdm man pages.
+.RS
+.RE
+.TP
+.B \-\-comppath=\f[I]comppath\f[]
+str, optional.
+Directory in which to place tqdm completion.
+.RS
+.RE
+.TP
+.B \-\-log=\f[I]log\f[]
+str, optional.
+CRITICAL|FATAL|ERROR|WARN(ING)|[default: \[aq]INFO\[aq]]|DEBUG|NOTSET.
+.RS
+.RE
+.SH AUTHORS
+tqdm developers <https://github.com/tqdm>.
diff --git a/venv/lib/python3.12/site-packages/tqdm/utils.py b/venv/lib/python3.12/site-packages/tqdm/utils.py
new file mode 100644
index 00000000..af3ec7de
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/tqdm/utils.py
@@ -0,0 +1,399 @@
+"""
+General helpers required for `tqdm.std`.
+"""
+import os
+import re
+import sys
+from functools import partial, partialmethod, wraps
+from inspect import signature
+# TODO consider using wcswidth third-party package for 0-width characters
+from unicodedata import east_asian_width
+from warnings import warn
+from weakref import proxy
+
+_range, _unich, _unicode, _basestring = range, chr, str, str
+CUR_OS = sys.platform
+IS_WIN = any(CUR_OS.startswith(i) for i in ['win32', 'cygwin'])
+IS_NIX = any(CUR_OS.startswith(i) for i in ['aix', 'linux', 'darwin', 'freebsd'])
+RE_ANSI = re.compile(r"\x1b\[[;\d]*[A-Za-z]")
+
+try:
+    if IS_WIN:
+        import colorama
+    else:
+        raise ImportError
+except ImportError:
+    colorama = None
+else:
+    try:
+        colorama.init(strip=False)
+    except TypeError:
+        colorama.init()
+
+
+def envwrap(prefix, types=None, is_method=False):
+    """
+    Override parameter defaults via `os.environ[prefix + param_name]`.
+    Maps UPPER_CASE env vars to lower_case param names.
+ camelCase isn't supported (because Windows ignores case). + + Precedence (highest first): + + - call (`foo(a=3)`) + - environ (`FOO_A=2`) + - signature (`def foo(a=1)`) + + Parameters + ---------- + prefix : str + Env var prefix, e.g. "FOO_" + types : dict, optional + Fallback mappings `{'param_name': type, ...}` if types cannot be + inferred from function signature. + Consider using `types=collections.defaultdict(lambda: ast.literal_eval)`. + is_method : bool, optional + Whether to use `functools.partialmethod`. If (default: False) use `functools.partial`. + + Examples + -------- + ``` + $ cat foo.py + from tqdm.utils import envwrap + @envwrap("FOO_") + def test(a=1, b=2, c=3): + print(f"received: a={a}, b={b}, c={c}") + + $ FOO_A=42 FOO_C=1337 python -c 'import foo; foo.test(c=99)' + received: a=42, b=2, c=99 + ``` + """ + if types is None: + types = {} + i = len(prefix) + env_overrides = {k[i:].lower(): v for k, v in os.environ.items() if k.startswith(prefix)} + part = partialmethod if is_method else partial + + def wrap(func): + params = signature(func).parameters + # ignore unknown env vars + overrides = {k: v for k, v in env_overrides.items() if k in params} + # infer overrides' `type`s + for k in overrides: + param = params[k] + if param.annotation is not param.empty: # typehints + for typ in getattr(param.annotation, '__args__', (param.annotation,)): + try: + overrides[k] = typ(overrides[k]) + except Exception: + pass + else: + break + elif param.default is not None: # type of default value + overrides[k] = type(param.default)(overrides[k]) + else: + try: # `types` fallback + overrides[k] = types[k](overrides[k]) + except KeyError: # keep unconverted (`str`) + pass + return part(func, **overrides) + return wrap + + +class FormatReplace(object): + """ + >>> a = FormatReplace('something') + >>> f"{a:5d}" + 'something' + """ # NOQA: P102 + def __init__(self, replace=''): + self.replace = replace + self.format_called = 0 + + def __format__(self, _): + self.format_called += 1 + return self.replace + + +class Comparable(object): + """Assumes child has self._comparable attr/@property""" + def __lt__(self, other): + return self._comparable < other._comparable + + def __le__(self, other): + return (self < other) or (self == other) + + def __eq__(self, other): + return self._comparable == other._comparable + + def __ne__(self, other): + return not self == other + + def __gt__(self, other): + return not self <= other + + def __ge__(self, other): + return not self < other + + +class ObjectWrapper(object): + def __getattr__(self, name): + return getattr(self._wrapped, name) + + def __setattr__(self, name, value): + return setattr(self._wrapped, name, value) + + def wrapper_getattr(self, name): + """Actual `self.getattr` rather than self._wrapped.getattr""" + try: + return object.__getattr__(self, name) + except AttributeError: # py2 + return getattr(self, name) + + def wrapper_setattr(self, name, value): + """Actual `self.setattr` rather than self._wrapped.setattr""" + return object.__setattr__(self, name, value) + + def __init__(self, wrapped): + """ + Thin wrapper around a given object + """ + self.wrapper_setattr('_wrapped', wrapped) + + +class SimpleTextIOWrapper(ObjectWrapper): + """ + Change only `.write()` of the wrapped object by encoding the passed + value and passing the result to the wrapped object's `.write()` method. 
+ """ + # pylint: disable=too-few-public-methods + def __init__(self, wrapped, encoding): + super().__init__(wrapped) + self.wrapper_setattr('encoding', encoding) + + def write(self, s): + """ + Encode `s` and pass to the wrapped object's `.write()` method. + """ + return self._wrapped.write(s.encode(self.wrapper_getattr('encoding'))) + + def __eq__(self, other): + return self._wrapped == getattr(other, '_wrapped', other) + + +class DisableOnWriteError(ObjectWrapper): + """ + Disable the given `tqdm_instance` upon `write()` or `flush()` errors. + """ + @staticmethod + def disable_on_exception(tqdm_instance, func): + """ + Quietly set `tqdm_instance.miniters=inf` if `func` raises `errno=5`. + """ + tqdm_instance = proxy(tqdm_instance) + + def inner(*args, **kwargs): + try: + return func(*args, **kwargs) + except OSError as e: + if e.errno != 5: + raise + try: + tqdm_instance.miniters = float('inf') + except ReferenceError: + pass + except ValueError as e: + if 'closed' not in str(e): + raise + try: + tqdm_instance.miniters = float('inf') + except ReferenceError: + pass + return inner + + def __init__(self, wrapped, tqdm_instance): + super().__init__(wrapped) + if hasattr(wrapped, 'write'): + self.wrapper_setattr( + 'write', self.disable_on_exception(tqdm_instance, wrapped.write)) + if hasattr(wrapped, 'flush'): + self.wrapper_setattr( + 'flush', self.disable_on_exception(tqdm_instance, wrapped.flush)) + + def __eq__(self, other): + return self._wrapped == getattr(other, '_wrapped', other) + + +class CallbackIOWrapper(ObjectWrapper): + def __init__(self, callback, stream, method="read"): + """ + Wrap a given `file`-like object's `read()` or `write()` to report + lengths to the given `callback` + """ + super().__init__(stream) + func = getattr(stream, method) + if method == "write": + @wraps(func) + def write(data, *args, **kwargs): + res = func(data, *args, **kwargs) + callback(len(data)) + return res + self.wrapper_setattr('write', write) + elif method == "read": + @wraps(func) + def read(*args, **kwargs): + data = func(*args, **kwargs) + callback(len(data)) + return data + self.wrapper_setattr('read', read) + else: + raise KeyError("Can only wrap read/write methods") + + +def _is_utf(encoding): + try: + u'\u2588\u2589'.encode(encoding) + except UnicodeEncodeError: + return False + except Exception: + try: + return encoding.lower().startswith('utf-') or ('U8' == encoding) + except Exception: + return False + else: + return True + + +def _supports_unicode(fp): + try: + return _is_utf(fp.encoding) + except AttributeError: + return False + + +def _is_ascii(s): + if isinstance(s, str): + for c in s: + if ord(c) > 255: + return False + return True + return _supports_unicode(s) + + +def _screen_shape_wrapper(): # pragma: no cover + """ + Return a function which returns console dimensions (width, height). + Supported: linux, osx, windows, cygwin. 
+ """ + _screen_shape = None + if IS_WIN: + _screen_shape = _screen_shape_windows + if _screen_shape is None: + _screen_shape = _screen_shape_tput + if IS_NIX: + _screen_shape = _screen_shape_linux + return _screen_shape + + +def _screen_shape_windows(fp): # pragma: no cover + try: + import struct + from ctypes import create_string_buffer, windll + from sys import stdin, stdout + + io_handle = -12 # assume stderr + if fp == stdin: + io_handle = -10 + elif fp == stdout: + io_handle = -11 + + h = windll.kernel32.GetStdHandle(io_handle) + csbi = create_string_buffer(22) + res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi) + if res: + (_bufx, _bufy, _curx, _cury, _wattr, left, top, right, bottom, + _maxx, _maxy) = struct.unpack("hhhhHhhhhhh", csbi.raw) + return right - left, bottom - top # +1 + except Exception: # nosec + pass + return None, None + + +def _screen_shape_tput(*_): # pragma: no cover + """cygwin xterm (windows)""" + try: + import shlex + from subprocess import check_call # nosec + return [int(check_call(shlex.split('tput ' + i))) - 1 + for i in ('cols', 'lines')] + except Exception: # nosec + pass + return None, None + + +def _screen_shape_linux(fp): # pragma: no cover + + try: + from array import array + from fcntl import ioctl + from termios import TIOCGWINSZ + except ImportError: + return None, None + else: + try: + rows, cols = array('h', ioctl(fp, TIOCGWINSZ, '\0' * 8))[:2] + return cols, rows + except Exception: + try: + return [int(os.environ[i]) - 1 for i in ("COLUMNS", "LINES")] + except (KeyError, ValueError): + return None, None + + +def _environ_cols_wrapper(): # pragma: no cover + """ + Return a function which returns console width. + Supported: linux, osx, windows, cygwin. + """ + warn("Use `_screen_shape_wrapper()(file)[0]` instead of" + " `_environ_cols_wrapper()(file)`", DeprecationWarning, stacklevel=2) + shape = _screen_shape_wrapper() + if not shape: + return None + + @wraps(shape) + def inner(fp): + return shape(fp)[0] + + return inner + + +def _term_move_up(): # pragma: no cover + return '' if (os.name == 'nt') and (colorama is None) else '\x1b[A' + + +def _text_width(s): + return sum(2 if east_asian_width(ch) in 'FW' else 1 for ch in str(s)) + + +def disp_len(data): + """ + Returns the real on-screen length of a string which may contain + ANSI control codes and wide chars. + """ + return _text_width(RE_ANSI.sub('', data)) + + +def disp_trim(data, length): + """ + Trim a string which may contain ANSI control characters. + """ + if len(data) == disp_len(data): + return data[:length] + + ansi_present = bool(RE_ANSI.search(data)) + while disp_len(data) > length: # carefully delete one char at a time + data = data[:-1] + if ansi_present and bool(RE_ANSI.search(data)): + # assume ANSI reset is required + return data if data.endswith("\033[0m") else data + "\033[0m" + return data diff --git a/venv/lib/python3.12/site-packages/tqdm/version.py b/venv/lib/python3.12/site-packages/tqdm/version.py new file mode 100644 index 00000000..11cbaea7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/tqdm/version.py @@ -0,0 +1,9 @@ +"""`tqdm` version detector. 
Precedence: installed dist, git, 'UNKNOWN'.""" +try: + from ._dist_ver import __version__ +except ImportError: + try: + from setuptools_scm import get_version + __version__ = get_version(root='..', relative_to=__file__) + except (ImportError, LookupError): + __version__ = "UNKNOWN" diff --git a/venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/METADATA new file mode 100644 index 00000000..b09cb50e --- /dev/null +++ b/venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/METADATA @@ -0,0 +1,72 @@ +Metadata-Version: 2.4 +Name: typing_extensions +Version: 4.15.0 +Summary: Backported and Experimental Type Hints for Python 3.9+ +Keywords: annotations,backport,checker,checking,function,hinting,hints,type,typechecking,typehinting,typehints,typing +Author-email: "Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Michael Lee" +Requires-Python: >=3.9 +Description-Content-Type: text/markdown +License-Expression: PSF-2.0 +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Topic :: Software Development +License-File: LICENSE +Project-URL: Bug Tracker, https://github.com/python/typing_extensions/issues +Project-URL: Changes, https://github.com/python/typing_extensions/blob/main/CHANGELOG.md +Project-URL: Documentation, https://typing-extensions.readthedocs.io/ +Project-URL: Home, https://github.com/python/typing_extensions +Project-URL: Q & A, https://github.com/python/typing/discussions +Project-URL: Repository, https://github.com/python/typing_extensions + +# Typing Extensions + +[![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing) + +[Documentation](https://typing-extensions.readthedocs.io/en/latest/#) – +[PyPI](https://pypi.org/project/typing-extensions/) + +## Overview + +The `typing_extensions` module serves two related purposes: + +- Enable use of new type system features on older Python versions. For example, + `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows + users on previous Python versions to use it too. +- Enable experimentation with new type system PEPs before they are accepted and + added to the `typing` module. + +`typing_extensions` is treated specially by static type checkers such as +mypy and pyright. Objects defined in `typing_extensions` are treated the same +way as equivalent forms in `typing`. + +`typing_extensions` uses +[Semantic Versioning](https://semver.org/). The +major version will be incremented only for backwards-incompatible changes. 
+Therefore, it's safe to depend +on `typing_extensions` like this: `typing_extensions ~=x.y`, +where `x.y` is the first version that includes all features you need. +[This](https://packaging.python.org/en/latest/specifications/version-specifiers/#compatible-release) +is equivalent to `typing_extensions >=x.y, <(x+1)`. Do not depend on `~= x.y.z` +unless you really know what you're doing; that defeats the purpose of +semantic versioning. + +## Included items + +See [the documentation](https://typing-extensions.readthedocs.io/en/latest/#) for a +complete listing of module contents. + +## Contributing + +See [CONTRIBUTING.md](https://github.com/python/typing_extensions/blob/main/CONTRIBUTING.md) +for how to contribute to `typing_extensions`. + diff --git a/venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/RECORD new file mode 100644 index 00000000..6ab09b53 --- /dev/null +++ b/venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/RECORD @@ -0,0 +1,7 @@ +__pycache__/typing_extensions.cpython-312.pyc,, +typing_extensions-4.15.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +typing_extensions-4.15.0.dist-info/METADATA,sha256=wTg3j-jxiTSsmd4GBTXFPsbBOu7WXpTDJkHafuMZKnI,3259 +typing_extensions-4.15.0.dist-info/RECORD,, +typing_extensions-4.15.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82 +typing_extensions-4.15.0.dist-info/licenses/LICENSE,sha256=Oy-B_iHRgcSZxZolbI4ZaEVdZonSaaqFNzv7avQdo78,13936 +typing_extensions.py,sha256=Qz0R0XDTok0usGXrwb_oSM6n49fOaFZ6tSvqLUwvftg,160429 diff --git a/venv/lib/python3.12/site-packages/werkzeug-3.1.3.dist-info/WHEEL b/venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/WHEEL similarity index 71% rename from venv/lib/python3.12/site-packages/werkzeug-3.1.3.dist-info/WHEEL rename to venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/WHEEL index e3c6feef..d8b9936d 100644 --- a/venv/lib/python3.12/site-packages/werkzeug-3.1.3.dist-info/WHEEL +++ b/venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/WHEEL @@ -1,4 +1,4 @@ Wheel-Version: 1.0 -Generator: flit 3.10.1 +Generator: flit 3.12.0 Root-Is-Purelib: true Tag: py3-none-any diff --git a/venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/licenses/LICENSE new file mode 100644 index 00000000..f26bcf4d --- /dev/null +++ b/venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/licenses/LICENSE @@ -0,0 +1,279 @@ +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see https://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations, which became +Zope Corporation. 
In 2001, the Python Software Foundation (PSF, see +https://www.python.org/psf/) was formed, a non-profit organization +created specifically to own Python-related Intellectual Property. +Zope Corporation was a sponsoring member of the PSF. + +All Python releases are Open Source (see https://opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? (1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +Python software and documentation are licensed under the +Python Software Foundation License Version 2. + +Starting with Python 3.8.6, examples, recipes, and other code in +the documentation are dual licensed under the PSF License Version 2 +and the Zero-Clause BSD license. + +Some software incorporated into Python is under different licenses. +The licenses are listed with code falling under that license. + + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation; +All Rights Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. 
PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. 
By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. 
Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION +---------------------------------------------------------------------- + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. diff --git a/venv/lib/python3.12/site-packages/typing_extensions.py b/venv/lib/python3.12/site-packages/typing_extensions.py new file mode 100644 index 00000000..77f33e16 --- /dev/null +++ b/venv/lib/python3.12/site-packages/typing_extensions.py @@ -0,0 +1,4317 @@ +import abc +import builtins +import collections +import collections.abc +import contextlib +import enum +import functools +import inspect +import io +import keyword +import operator +import sys +import types as _types +import typing +import warnings + +# Breakpoint: https://github.com/python/cpython/pull/119891 +if sys.version_info >= (3, 14): + import annotationlib + +__all__ = [ + # Super-special typing primitives. + 'Any', + 'ClassVar', + 'Concatenate', + 'Final', + 'LiteralString', + 'ParamSpec', + 'ParamSpecArgs', + 'ParamSpecKwargs', + 'Self', + 'Type', + 'TypeVar', + 'TypeVarTuple', + 'Unpack', + + # ABCs (from collections.abc). 
+    'Awaitable',
+    'AsyncIterator',
+    'AsyncIterable',
+    'Coroutine',
+    'AsyncGenerator',
+    'AsyncContextManager',
+    'Buffer',
+    'ChainMap',
+
+    # Concrete collection types.
+    'ContextManager',
+    'Counter',
+    'Deque',
+    'DefaultDict',
+    'NamedTuple',
+    'OrderedDict',
+    'TypedDict',
+
+    # Structural checks, a.k.a. protocols.
+    'SupportsAbs',
+    'SupportsBytes',
+    'SupportsComplex',
+    'SupportsFloat',
+    'SupportsIndex',
+    'SupportsInt',
+    'SupportsRound',
+    'Reader',
+    'Writer',
+
+    # One-off things.
+    'Annotated',
+    'assert_never',
+    'assert_type',
+    'clear_overloads',
+    'dataclass_transform',
+    'deprecated',
+    'disjoint_base',
+    'Doc',
+    'evaluate_forward_ref',
+    'get_overloads',
+    'final',
+    'Format',
+    'get_annotations',
+    'get_args',
+    'get_origin',
+    'get_original_bases',
+    'get_protocol_members',
+    'get_type_hints',
+    'IntVar',
+    'is_protocol',
+    'is_typeddict',
+    'Literal',
+    'NewType',
+    'overload',
+    'override',
+    'Protocol',
+    'Sentinel',
+    'reveal_type',
+    'runtime',
+    'runtime_checkable',
+    'Text',
+    'TypeAlias',
+    'TypeAliasType',
+    'TypeForm',
+    'TypeGuard',
+    'TypeIs',
+    'TYPE_CHECKING',
+    'type_repr',
+    'Never',
+    'NoReturn',
+    'ReadOnly',
+    'Required',
+    'NotRequired',
+    'NoDefault',
+    'NoExtraItems',
+
+    # Pure aliases, have always been in typing
+    'AbstractSet',
+    'AnyStr',
+    'BinaryIO',
+    'Callable',
+    'Collection',
+    'Container',
+    'Dict',
+    'ForwardRef',
+    'FrozenSet',
+    'Generator',
+    'Generic',
+    'Hashable',
+    'IO',
+    'ItemsView',
+    'Iterable',
+    'Iterator',
+    'KeysView',
+    'List',
+    'Mapping',
+    'MappingView',
+    'Match',
+    'MutableMapping',
+    'MutableSequence',
+    'MutableSet',
+    'Optional',
+    'Pattern',
+    'Reversible',
+    'Sequence',
+    'Set',
+    'Sized',
+    'TextIO',
+    'Tuple',
+    'Union',
+    'ValuesView',
+    'cast',
+    'no_type_check',
+    'no_type_check_decorator',
+]
+
+# for backward compatibility
+PEP_560 = True
+GenericMeta = type
+# Breakpoint: https://github.com/python/cpython/pull/116129
+_PEP_696_IMPLEMENTED = sys.version_info >= (3, 13, 0, "beta")
+
+# Added with bpo-45166 to 3.10.1+ and some 3.9 versions
+_FORWARD_REF_HAS_CLASS = "__forward_is_class__" in typing.ForwardRef.__slots__
+
+# The functions below are modified copies of typing internal helpers.
+# They are needed by _ProtocolMeta and they provide support for PEP 646.
+
+
+class _Sentinel:
+    def __repr__(self):
+        return "<sentinel>"
+
+
+_marker = _Sentinel()
+
+
+# Breakpoint: https://github.com/python/cpython/pull/27342
+if sys.version_info >= (3, 10):
+    def _should_collect_from_parameters(t):
+        return isinstance(
+            t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType)
+        )
+else:
+    def _should_collect_from_parameters(t):
+        return isinstance(t, (typing._GenericAlias, _types.GenericAlias))
+
+
+NoReturn = typing.NoReturn
+
+# Some unconstrained type variables. These are used by the container types.
+# (These are not for export.)
+T = typing.TypeVar('T')  # Any type.
+KT = typing.TypeVar('KT')  # Key type.
+VT = typing.TypeVar('VT')  # Value type.
+T_co = typing.TypeVar('T_co', covariant=True)  # Any type covariant containers.
+T_contra = typing.TypeVar('T_contra', contravariant=True)  # Ditto contravariant.
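The backport is typically consumed behind a version check so the stdlib symbol wins where it exists; a small illustrative sketch (the `Builder` class is invented) using `Self`, one of the names exported above:

```python
import sys

# Prefer the stdlib form on new interpreters; fall back to the backport.
if sys.version_info >= (3, 11):
    from typing import Self
else:
    from typing_extensions import Self

class Builder:
    def add(self, part: str) -> Self:  # resolves identically on any version
        return self
```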
+ + +# Breakpoint: https://github.com/python/cpython/pull/31841 +if sys.version_info >= (3, 11): + from typing import Any +else: + + class _AnyMeta(type): + def __instancecheck__(self, obj): + if self is Any: + raise TypeError("typing_extensions.Any cannot be used with isinstance()") + return super().__instancecheck__(obj) + + def __repr__(self): + if self is Any: + return "typing_extensions.Any" + return super().__repr__() + + class Any(metaclass=_AnyMeta): + """Special type indicating an unconstrained type. + - Any is compatible with every type. + - Any assumed to have all methods. + - All values assumed to be instances of Any. + Note that all the above statements are true from the point of view of + static type checkers. At runtime, Any should not be used with instance + checks. + """ + def __new__(cls, *args, **kwargs): + if cls is Any: + raise TypeError("Any cannot be instantiated") + return super().__new__(cls, *args, **kwargs) + + +ClassVar = typing.ClassVar + +# Vendored from cpython typing._SpecialFrom +# Having a separate class means that instances will not be rejected by +# typing._type_check. +class _SpecialForm(typing._Final, _root=True): + __slots__ = ('_name', '__doc__', '_getitem') + + def __init__(self, getitem): + self._getitem = getitem + self._name = getitem.__name__ + self.__doc__ = getitem.__doc__ + + def __getattr__(self, item): + if item in {'__name__', '__qualname__'}: + return self._name + + raise AttributeError(item) + + def __mro_entries__(self, bases): + raise TypeError(f"Cannot subclass {self!r}") + + def __repr__(self): + return f'typing_extensions.{self._name}' + + def __reduce__(self): + return self._name + + def __call__(self, *args, **kwds): + raise TypeError(f"Cannot instantiate {self!r}") + + def __or__(self, other): + return typing.Union[self, other] + + def __ror__(self, other): + return typing.Union[other, self] + + def __instancecheck__(self, obj): + raise TypeError(f"{self} cannot be used with isinstance()") + + def __subclasscheck__(self, cls): + raise TypeError(f"{self} cannot be used with issubclass()") + + @typing._tp_cache + def __getitem__(self, parameters): + return self._getitem(self, parameters) + + +# Note that inheriting from this class means that the object will be +# rejected by typing._type_check, so do not use it if the special form +# is arguably valid as a type by itself. +class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + +Final = typing.Final + +# Breakpoint: https://github.com/python/cpython/pull/30530 +if sys.version_info >= (3, 11): + final = typing.final +else: + # @final exists in 3.8+, but we backport it for all versions + # before 3.11 to keep support for the __final__ attribute. + # See https://bugs.python.org/issue46342 + def final(f): + """This decorator can be used to indicate to type checkers that + the decorated method cannot be overridden, and decorated class + cannot be subclassed. For example: + + class Base: + @final + def done(self) -> None: + ... + class Sub(Base): + def done(self) -> None: # Error reported by type checker + ... + @final + class Leaf: + ... + class Other(Leaf): # Error reported by type checker + ... + + There is no runtime checking of these properties. The decorator + sets the ``__final__`` attribute to ``True`` on the decorated object + to allow runtime introspection. + """ + try: + f.__final__ = True + except (AttributeError, TypeError): + # Skip the attribute silently if it is not writable. 
+ # AttributeError happens if the object has __slots__ or a + # read-only property, TypeError if it's a builtin class. + pass + return f + + +if hasattr(typing, "disjoint_base"): # 3.15 + disjoint_base = typing.disjoint_base +else: + def disjoint_base(cls): + """This decorator marks a class as a disjoint base. + + Child classes of a disjoint base cannot inherit from other disjoint bases that are + not parent classes of the disjoint base. + + For example: + + @disjoint_base + class Disjoint1: pass + + @disjoint_base + class Disjoint2: pass + + class Disjoint3(Disjoint1, Disjoint2): pass # Type checker error + + Type checkers can use knowledge of disjoint bases to detect unreachable code + and determine when two types can overlap. + + See PEP 800.""" + cls.__disjoint_base__ = True + return cls + + +def IntVar(name): + return typing.TypeVar(name) + + +# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8 +# Breakpoint: https://github.com/python/cpython/pull/29334 +if sys.version_info >= (3, 10, 1): + Literal = typing.Literal +else: + def _flatten_literal_params(parameters): + """An internal helper for Literal creation: flatten Literals among parameters""" + params = [] + for p in parameters: + if isinstance(p, _LiteralGenericAlias): + params.extend(p.__args__) + else: + params.append(p) + return tuple(params) + + def _value_and_type_iter(params): + for p in params: + yield p, type(p) + + class _LiteralGenericAlias(typing._GenericAlias, _root=True): + def __eq__(self, other): + if not isinstance(other, _LiteralGenericAlias): + return NotImplemented + these_args_deduped = set(_value_and_type_iter(self.__args__)) + other_args_deduped = set(_value_and_type_iter(other.__args__)) + return these_args_deduped == other_args_deduped + + def __hash__(self): + return hash(frozenset(_value_and_type_iter(self.__args__))) + + class _LiteralForm(_ExtensionsSpecialForm, _root=True): + def __init__(self, doc: str): + self._name = 'Literal' + self._doc = self.__doc__ = doc + + def __getitem__(self, parameters): + if not isinstance(parameters, tuple): + parameters = (parameters,) + + parameters = _flatten_literal_params(parameters) + + val_type_pairs = list(_value_and_type_iter(parameters)) + try: + deduped_pairs = set(val_type_pairs) + except TypeError: + # unhashable parameters + pass + else: + # similar logic to typing._deduplicate on Python 3.9+ + if len(deduped_pairs) < len(val_type_pairs): + new_parameters = [] + for pair in val_type_pairs: + if pair in deduped_pairs: + new_parameters.append(pair[0]) + deduped_pairs.remove(pair) + assert not deduped_pairs, deduped_pairs + parameters = tuple(new_parameters) + + return _LiteralGenericAlias(self, parameters) + + Literal = _LiteralForm(doc="""\ + A type that can be used to indicate to type checkers + that the corresponding value has a value literally equivalent + to the provided parameter. For example: + + var: Literal[4] = 4 + + The type checker understands that 'var' is literally equal to + the value 4 and no other value. + + Literal[...] cannot be subclassed. 
There is no runtime + checking verifying that the parameter is actually a value + instead of a type.""") + + +_overload_dummy = typing._overload_dummy + + +if hasattr(typing, "get_overloads"): # 3.11+ + overload = typing.overload + get_overloads = typing.get_overloads + clear_overloads = typing.clear_overloads +else: + # {module: {qualname: {firstlineno: func}}} + _overload_registry = collections.defaultdict( + functools.partial(collections.defaultdict, dict) + ) + + def overload(func): + """Decorator for overloaded functions/methods. + + In a stub file, place two or more stub definitions for the same + function in a row, each decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + + In a non-stub file (i.e. a regular .py file), do the same but + follow it with an implementation. The implementation should *not* + be decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + def utf8(value): + # implementation goes here + + The overloads for a function can be retrieved at runtime using the + get_overloads() function. + """ + # classmethod and staticmethod + f = getattr(func, "__func__", func) + try: + _overload_registry[f.__module__][f.__qualname__][ + f.__code__.co_firstlineno + ] = func + except AttributeError: + # Not a normal function; ignore. + pass + return _overload_dummy + + def get_overloads(func): + """Return all defined overloads for *func* as a sequence.""" + # classmethod and staticmethod + f = getattr(func, "__func__", func) + if f.__module__ not in _overload_registry: + return [] + mod_dict = _overload_registry[f.__module__] + if f.__qualname__ not in mod_dict: + return [] + return list(mod_dict[f.__qualname__].values()) + + def clear_overloads(): + """Clear all overloads in the registry.""" + _overload_registry.clear() + + +# This is not a real generic class. Don't use outside annotations. +Type = typing.Type + +# Various ABCs mimicking those in collections.abc. +# A few are simply re-exported for completeness. +Awaitable = typing.Awaitable +Coroutine = typing.Coroutine +AsyncIterable = typing.AsyncIterable +AsyncIterator = typing.AsyncIterator +Deque = typing.Deque +DefaultDict = typing.DefaultDict +OrderedDict = typing.OrderedDict +Counter = typing.Counter +ChainMap = typing.ChainMap +Text = typing.Text +TYPE_CHECKING = typing.TYPE_CHECKING + + +# Breakpoint: https://github.com/python/cpython/pull/118681 +if sys.version_info >= (3, 13, 0, "beta"): + from typing import AsyncContextManager, AsyncGenerator, ContextManager, Generator +else: + def _is_dunder(attr): + return attr.startswith('__') and attr.endswith('__') + + + class _SpecialGenericAlias(typing._SpecialGenericAlias, _root=True): + def __init__(self, origin, nparams, *, inst=True, name=None, defaults=()): + super().__init__(origin, nparams, inst=inst, name=name) + self._defaults = defaults + + def __setattr__(self, attr, val): + allowed_attrs = {'_name', '_inst', '_nparams', '_defaults'} + if _is_dunder(attr) or attr in allowed_attrs: + object.__setattr__(self, attr, val) + else: + setattr(self.__origin__, attr, val) + + @typing._tp_cache + def __getitem__(self, params): + if not isinstance(params, tuple): + params = (params,) + msg = "Parameters to generic types must be types." 
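+            # Editor's note: the defaults-handling below backfills trailing
+            # type parameters from self._defaults, so on this backport e.g.
+            # Generator[int] is accepted and treated like
+            # Generator[int, None, None] (see the defaults passed to the
+            # aliases defined further down).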
+            params = tuple(typing._type_check(p, msg) for p in params)
+            if (
+                self._defaults
+                and len(params) < self._nparams
+                and len(params) + len(self._defaults) >= self._nparams
+            ):
+                params = (*params, *self._defaults[len(params) - self._nparams:])
+            actual_len = len(params)
+
+            if actual_len != self._nparams:
+                if self._defaults:
+                    expected = f"at least {self._nparams - len(self._defaults)}"
+                else:
+                    expected = str(self._nparams)
+                if not self._nparams:
+                    raise TypeError(f"{self} is not a generic class")
+                raise TypeError(
+                    f"Too {'many' if actual_len > self._nparams else 'few'}"
+                    f" arguments for {self};"
+                    f" actual {actual_len}, expected {expected}"
+                )
+            return self.copy_with(params)
+
+    _NoneType = type(None)
+    Generator = _SpecialGenericAlias(
+        collections.abc.Generator, 3, defaults=(_NoneType, _NoneType)
+    )
+    AsyncGenerator = _SpecialGenericAlias(
+        collections.abc.AsyncGenerator, 2, defaults=(_NoneType,)
+    )
+    ContextManager = _SpecialGenericAlias(
+        contextlib.AbstractContextManager,
+        2,
+        name="ContextManager",
+        defaults=(typing.Optional[bool],)
+    )
+    AsyncContextManager = _SpecialGenericAlias(
+        contextlib.AbstractAsyncContextManager,
+        2,
+        name="AsyncContextManager",
+        defaults=(typing.Optional[bool],)
+    )
+
+
+_PROTO_ALLOWLIST = {
+    'collections.abc': [
+        'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable',
+        'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer',
+    ],
+    'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'],
+    'typing_extensions': ['Buffer'],
+}
+
+
+_EXCLUDED_ATTRS = frozenset(typing.EXCLUDED_ATTRIBUTES) | {
+    "__match_args__", "__protocol_attrs__", "__non_callable_proto_members__",
+    "__final__",
+}
+
+
+def _get_protocol_attrs(cls):
+    attrs = set()
+    for base in cls.__mro__[:-1]:  # without object
+        if base.__name__ in {'Protocol', 'Generic'}:
+            continue
+        annotations = getattr(base, '__annotations__', {})
+        for attr in (*base.__dict__, *annotations):
+            if (not attr.startswith('_abc_') and attr not in _EXCLUDED_ATTRS):
+                attrs.add(attr)
+    return attrs
+
+
+def _caller(depth=1, default='__main__'):
+    try:
+        return sys._getframemodulename(depth + 1) or default
+    except AttributeError:  # For platforms without _getframemodulename()
+        pass
+    try:
+        return sys._getframe(depth + 1).f_globals.get('__name__', default)
+    except (AttributeError, ValueError):  # For platforms without _getframe()
+        pass
+    return None
+
+
+# `__match_args__` attribute was removed from protocol members in 3.13,
+# we want to backport this change to older Python versions.
+# Breakpoint: https://github.com/python/cpython/pull/110683
+if sys.version_info >= (3, 13):
+    Protocol = typing.Protocol
+else:
+    def _allow_reckless_class_checks(depth=2):
+        """Allow instance and class checks for special stdlib modules.
+        The abc and functools modules indiscriminately call isinstance() and
+        issubclass() on the whole MRO of a user class, which may contain protocols.
+        """
+        return _caller(depth) in {'abc', 'functools', None}
+
+    def _no_init(self, *args, **kwargs):
+        if type(self)._is_protocol:
+            raise TypeError('Protocols cannot be instantiated')
+
+    def _type_check_issubclass_arg_1(arg):
+        """Raise TypeError if `arg` is not an instance of `type`
+        in `issubclass(arg, <protocol>)`.
+
+        In most cases, this is verified by type.__subclasscheck__.
+        Checking it again unnecessarily would slow down issubclass() checks,
+        so, we don't perform this check unless we absolutely have to.
+ + For various error paths, however, + we want to ensure that *this* error message is shown to the user + where relevant, rather than a typing.py-specific error message. + """ + if not isinstance(arg, type): + # Same error message as for issubclass(1, int). + raise TypeError('issubclass() arg 1 must be a class') + + # Inheriting from typing._ProtocolMeta isn't actually desirable, + # but is necessary to allow typing.Protocol and typing_extensions.Protocol + # to mix without getting TypeErrors about "metaclass conflict" + class _ProtocolMeta(type(typing.Protocol)): + # This metaclass is somewhat unfortunate, + # but is necessary for several reasons... + # + # NOTE: DO NOT call super() in any methods in this class + # That would call the methods on typing._ProtocolMeta on Python <=3.11 + # and those are slow + def __new__(mcls, name, bases, namespace, **kwargs): + if name == "Protocol" and len(bases) < 2: + pass + elif {Protocol, typing.Protocol} & set(bases): + for base in bases: + if not ( + base in {object, typing.Generic, Protocol, typing.Protocol} + or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, []) + or is_protocol(base) + ): + raise TypeError( + f"Protocols can only inherit from other protocols, " + f"got {base!r}" + ) + return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs) + + def __init__(cls, *args, **kwargs): + abc.ABCMeta.__init__(cls, *args, **kwargs) + if getattr(cls, "_is_protocol", False): + cls.__protocol_attrs__ = _get_protocol_attrs(cls) + + def __subclasscheck__(cls, other): + if cls is Protocol: + return type.__subclasscheck__(cls, other) + if ( + getattr(cls, '_is_protocol', False) + and not _allow_reckless_class_checks() + ): + if not getattr(cls, '_is_runtime_protocol', False): + _type_check_issubclass_arg_1(other) + raise TypeError( + "Instance and class checks can only be used with " + "@runtime_checkable protocols" + ) + if ( + # this attribute is set by @runtime_checkable: + cls.__non_callable_proto_members__ + and cls.__dict__.get("__subclasshook__") is _proto_hook + ): + _type_check_issubclass_arg_1(other) + non_method_attrs = sorted(cls.__non_callable_proto_members__) + raise TypeError( + "Protocols with non-method members don't support issubclass()." + f" Non-method members: {str(non_method_attrs)[1:-1]}." + ) + return abc.ABCMeta.__subclasscheck__(cls, other) + + def __instancecheck__(cls, instance): + # We need this method for situations where attributes are + # assigned in __init__. 
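+            # Editor's note: inspect.getattr_static() in the loop below reads
+            # the instance __dict__ without triggering descriptors, so members
+            # that only exist after __init__ has run still satisfy the check.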
+ if cls is Protocol: + return type.__instancecheck__(cls, instance) + if not getattr(cls, "_is_protocol", False): + # i.e., it's a concrete subclass of a protocol + return abc.ABCMeta.__instancecheck__(cls, instance) + + if ( + not getattr(cls, '_is_runtime_protocol', False) and + not _allow_reckless_class_checks() + ): + raise TypeError("Instance and class checks can only be used with" + " @runtime_checkable protocols") + + if abc.ABCMeta.__instancecheck__(cls, instance): + return True + + for attr in cls.__protocol_attrs__: + try: + val = inspect.getattr_static(instance, attr) + except AttributeError: + break + # this attribute is set by @runtime_checkable: + if val is None and attr not in cls.__non_callable_proto_members__: + break + else: + return True + + return False + + def __eq__(cls, other): + # Hack so that typing.Generic.__class_getitem__ + # treats typing_extensions.Protocol + # as equivalent to typing.Protocol + if abc.ABCMeta.__eq__(cls, other) is True: + return True + return cls is Protocol and other is typing.Protocol + + # This has to be defined, or the abc-module cache + # complains about classes with this metaclass being unhashable, + # if we define only __eq__! + def __hash__(cls) -> int: + return type.__hash__(cls) + + @classmethod + def _proto_hook(cls, other): + if not cls.__dict__.get('_is_protocol', False): + return NotImplemented + + for attr in cls.__protocol_attrs__: + for base in other.__mro__: + # Check if the members appears in the class dictionary... + if attr in base.__dict__: + if base.__dict__[attr] is None: + return NotImplemented + break + + # ...or in annotations, if it is a sub-protocol. + annotations = getattr(base, '__annotations__', {}) + if ( + isinstance(annotations, collections.abc.Mapping) + and attr in annotations + and is_protocol(other) + ): + break + else: + return NotImplemented + return True + + class Protocol(typing.Generic, metaclass=_ProtocolMeta): + __doc__ = typing.Protocol.__doc__ + __slots__ = () + _is_protocol = True + _is_runtime_protocol = False + + def __init_subclass__(cls, *args, **kwargs): + super().__init_subclass__(*args, **kwargs) + + # Determine if this is a protocol or a concrete subclass. + if not cls.__dict__.get('_is_protocol', False): + cls._is_protocol = any(b is Protocol for b in cls.__bases__) + + # Set (or override) the protocol subclass hook. + if '__subclasshook__' not in cls.__dict__: + cls.__subclasshook__ = _proto_hook + + # Prohibit instantiation for protocol classes + if cls._is_protocol and cls.__init__ is Protocol.__init__: + cls.__init__ = _no_init + + +# Breakpoint: https://github.com/python/cpython/pull/113401 +if sys.version_info >= (3, 13): + runtime_checkable = typing.runtime_checkable +else: + def runtime_checkable(cls): + """Mark a protocol class as a runtime protocol. + + Such protocol can be used with isinstance() and issubclass(). + Raise TypeError if applied to a non-protocol class. + This allows a simple-minded structural check very similar to + one trick ponies in collections.abc such as Iterable. + + For example:: + + @runtime_checkable + class Closable(Protocol): + def close(self): ... + + assert isinstance(open('/some/file'), Closable) + + Warning: this will check only the presence of the required methods, + not their type signatures! 
+ """ + if not issubclass(cls, typing.Generic) or not getattr(cls, '_is_protocol', False): + raise TypeError(f'@runtime_checkable can be only applied to protocol classes,' + f' got {cls!r}') + cls._is_runtime_protocol = True + + # typing.Protocol classes on <=3.11 break if we execute this block, + # because typing.Protocol classes on <=3.11 don't have a + # `__protocol_attrs__` attribute, and this block relies on the + # `__protocol_attrs__` attribute. Meanwhile, typing.Protocol classes on 3.12.2+ + # break if we *don't* execute this block, because *they* assume that all + # protocol classes have a `__non_callable_proto_members__` attribute + # (which this block sets) + if isinstance(cls, _ProtocolMeta) or sys.version_info >= (3, 12, 2): + # PEP 544 prohibits using issubclass() + # with protocols that have non-method members. + # See gh-113320 for why we compute this attribute here, + # rather than in `_ProtocolMeta.__init__` + cls.__non_callable_proto_members__ = set() + for attr in cls.__protocol_attrs__: + try: + is_callable = callable(getattr(cls, attr, None)) + except Exception as e: + raise TypeError( + f"Failed to determine whether protocol member {attr!r} " + "is a method member" + ) from e + else: + if not is_callable: + cls.__non_callable_proto_members__.add(attr) + + return cls + + +# The "runtime" alias exists for backwards compatibility. +runtime = runtime_checkable + + +# Our version of runtime-checkable protocols is faster on Python <=3.11 +# Breakpoint: https://github.com/python/cpython/pull/112717 +if sys.version_info >= (3, 12): + SupportsInt = typing.SupportsInt + SupportsFloat = typing.SupportsFloat + SupportsComplex = typing.SupportsComplex + SupportsBytes = typing.SupportsBytes + SupportsIndex = typing.SupportsIndex + SupportsAbs = typing.SupportsAbs + SupportsRound = typing.SupportsRound +else: + @runtime_checkable + class SupportsInt(Protocol): + """An ABC with one abstract method __int__.""" + __slots__ = () + + @abc.abstractmethod + def __int__(self) -> int: + pass + + @runtime_checkable + class SupportsFloat(Protocol): + """An ABC with one abstract method __float__.""" + __slots__ = () + + @abc.abstractmethod + def __float__(self) -> float: + pass + + @runtime_checkable + class SupportsComplex(Protocol): + """An ABC with one abstract method __complex__.""" + __slots__ = () + + @abc.abstractmethod + def __complex__(self) -> complex: + pass + + @runtime_checkable + class SupportsBytes(Protocol): + """An ABC with one abstract method __bytes__.""" + __slots__ = () + + @abc.abstractmethod + def __bytes__(self) -> bytes: + pass + + @runtime_checkable + class SupportsIndex(Protocol): + __slots__ = () + + @abc.abstractmethod + def __index__(self) -> int: + pass + + @runtime_checkable + class SupportsAbs(Protocol[T_co]): + """ + An ABC with one abstract method __abs__ that is covariant in its return type. + """ + __slots__ = () + + @abc.abstractmethod + def __abs__(self) -> T_co: + pass + + @runtime_checkable + class SupportsRound(Protocol[T_co]): + """ + An ABC with one abstract method __round__ that is covariant in its return type. + """ + __slots__ = () + + @abc.abstractmethod + def __round__(self, ndigits: int = 0) -> T_co: + pass + + +if hasattr(io, "Reader") and hasattr(io, "Writer"): + Reader = io.Reader + Writer = io.Writer +else: + @runtime_checkable + class Reader(Protocol[T_co]): + """Protocol for simple I/O reader instances. + + This protocol only supports blocking I/O. 
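+
+        Example (editor's illustrative addition)::
+
+            def head(f: Reader[bytes], n: int = 16) -> bytes:
+                return f.read(n)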
+ """ + + __slots__ = () + + @abc.abstractmethod + def read(self, size: int = ..., /) -> T_co: + """Read data from the input stream and return it. + + If *size* is specified, at most *size* items (bytes/characters) will be + read. + """ + + @runtime_checkable + class Writer(Protocol[T_contra]): + """Protocol for simple I/O writer instances. + + This protocol only supports blocking I/O. + """ + + __slots__ = () + + @abc.abstractmethod + def write(self, data: T_contra, /) -> int: + """Write *data* to the output stream and return the number of items written.""" # noqa: E501 + + +_NEEDS_SINGLETONMETA = ( + not hasattr(typing, "NoDefault") or not hasattr(typing, "NoExtraItems") +) + +if _NEEDS_SINGLETONMETA: + class SingletonMeta(type): + def __setattr__(cls, attr, value): + # TypeError is consistent with the behavior of NoneType + raise TypeError( + f"cannot set {attr!r} attribute of immutable type {cls.__name__!r}" + ) + + +if hasattr(typing, "NoDefault"): + NoDefault = typing.NoDefault +else: + class NoDefaultType(metaclass=SingletonMeta): + """The type of the NoDefault singleton.""" + + __slots__ = () + + def __new__(cls): + return globals().get("NoDefault") or object.__new__(cls) + + def __repr__(self): + return "typing_extensions.NoDefault" + + def __reduce__(self): + return "NoDefault" + + NoDefault = NoDefaultType() + del NoDefaultType + +if hasattr(typing, "NoExtraItems"): + NoExtraItems = typing.NoExtraItems +else: + class NoExtraItemsType(metaclass=SingletonMeta): + """The type of the NoExtraItems singleton.""" + + __slots__ = () + + def __new__(cls): + return globals().get("NoExtraItems") or object.__new__(cls) + + def __repr__(self): + return "typing_extensions.NoExtraItems" + + def __reduce__(self): + return "NoExtraItems" + + NoExtraItems = NoExtraItemsType() + del NoExtraItemsType + +if _NEEDS_SINGLETONMETA: + del SingletonMeta + + +# Update this to something like >=3.13.0b1 if and when +# PEP 728 is implemented in CPython +_PEP_728_IMPLEMENTED = False + +if _PEP_728_IMPLEMENTED: + # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" + # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059 + # The standard library TypedDict below Python 3.11 does not store runtime + # information about optional and required keys when using Required or NotRequired. + # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11. + # Aaaand on 3.12 we add __orig_bases__ to TypedDict + # to enable better runtime introspection. + # On 3.13 we deprecate some odd ways of creating TypedDicts. + # Also on 3.13, PEP 705 adds the ReadOnly[] qualifier. + # PEP 728 (still pending) makes more changes. 
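+    # Editor's illustrative sketch (not upstream code): the qualifiers noted
+    # above compose like this, whichever implementation is picked below:
+    #
+    #     class Movie(TypedDict):
+    #         title: ReadOnly[str]       # PEP 705: may not be reassigned
+    #         year: NotRequired[int]     # PEP 655: key may be omitted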
+ TypedDict = typing.TypedDict + _TypedDictMeta = typing._TypedDictMeta + is_typeddict = typing.is_typeddict +else: + # 3.10.0 and later + _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters + + def _get_typeddict_qualifiers(annotation_type): + while True: + annotation_origin = get_origin(annotation_type) + if annotation_origin is Annotated: + annotation_args = get_args(annotation_type) + if annotation_args: + annotation_type = annotation_args[0] + else: + break + elif annotation_origin is Required: + yield Required + annotation_type, = get_args(annotation_type) + elif annotation_origin is NotRequired: + yield NotRequired + annotation_type, = get_args(annotation_type) + elif annotation_origin is ReadOnly: + yield ReadOnly + annotation_type, = get_args(annotation_type) + else: + break + + class _TypedDictMeta(type): + + def __new__(cls, name, bases, ns, *, total=True, closed=None, + extra_items=NoExtraItems): + """Create new typed dict class object. + + This method is called when TypedDict is subclassed, + or when TypedDict is instantiated. This way + TypedDict supports all three syntax forms described in its docstring. + Subclasses and instances of TypedDict return actual dictionaries. + """ + for base in bases: + if type(base) is not _TypedDictMeta and base is not typing.Generic: + raise TypeError('cannot inherit from both a TypedDict type ' + 'and a non-TypedDict base class') + if closed is not None and extra_items is not NoExtraItems: + raise TypeError(f"Cannot combine closed={closed!r} and extra_items") + + if any(issubclass(b, typing.Generic) for b in bases): + generic_base = (typing.Generic,) + else: + generic_base = () + + ns_annotations = ns.pop('__annotations__', None) + + # typing.py generally doesn't let you inherit from plain Generic, unless + # the name of the class happens to be "Protocol" + tp_dict = type.__new__(_TypedDictMeta, "Protocol", (*generic_base, dict), ns) + tp_dict.__name__ = name + if tp_dict.__qualname__ == "Protocol": + tp_dict.__qualname__ = name + + if not hasattr(tp_dict, '__orig_bases__'): + tp_dict.__orig_bases__ = bases + + annotations = {} + own_annotate = None + if ns_annotations is not None: + own_annotations = ns_annotations + elif sys.version_info >= (3, 14): + if hasattr(annotationlib, "get_annotate_from_class_namespace"): + own_annotate = annotationlib.get_annotate_from_class_namespace(ns) + else: + # 3.14.0a7 and earlier + own_annotate = ns.get("__annotate__") + if own_annotate is not None: + own_annotations = annotationlib.call_annotate_function( + own_annotate, Format.FORWARDREF, owner=tp_dict + ) + else: + own_annotations = {} + else: + own_annotations = {} + msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" + if _TAKES_MODULE: + own_checked_annotations = { + n: typing._type_check(tp, msg, module=tp_dict.__module__) + for n, tp in own_annotations.items() + } + else: + own_checked_annotations = { + n: typing._type_check(tp, msg) + for n, tp in own_annotations.items() + } + required_keys = set() + optional_keys = set() + readonly_keys = set() + mutable_keys = set() + extra_items_type = extra_items + + for base in bases: + base_dict = base.__dict__ + + if sys.version_info <= (3, 14): + annotations.update(base_dict.get('__annotations__', {})) + required_keys.update(base_dict.get('__required_keys__', ())) + optional_keys.update(base_dict.get('__optional_keys__', ())) + readonly_keys.update(base_dict.get('__readonly_keys__', ())) + mutable_keys.update(base_dict.get('__mutable_keys__', ())) + + # This 
was specified in an earlier version of PEP 728. Support + # is retained for backwards compatibility, but only for Python + # 3.13 and lower. + if (closed and sys.version_info < (3, 14) + and "__extra_items__" in own_checked_annotations): + annotation_type = own_checked_annotations.pop("__extra_items__") + qualifiers = set(_get_typeddict_qualifiers(annotation_type)) + if Required in qualifiers: + raise TypeError( + "Special key __extra_items__ does not support " + "Required" + ) + if NotRequired in qualifiers: + raise TypeError( + "Special key __extra_items__ does not support " + "NotRequired" + ) + extra_items_type = annotation_type + + annotations.update(own_checked_annotations) + for annotation_key, annotation_type in own_checked_annotations.items(): + qualifiers = set(_get_typeddict_qualifiers(annotation_type)) + + if Required in qualifiers: + required_keys.add(annotation_key) + elif NotRequired in qualifiers: + optional_keys.add(annotation_key) + elif total: + required_keys.add(annotation_key) + else: + optional_keys.add(annotation_key) + if ReadOnly in qualifiers: + mutable_keys.discard(annotation_key) + readonly_keys.add(annotation_key) + else: + mutable_keys.add(annotation_key) + readonly_keys.discard(annotation_key) + + # Breakpoint: https://github.com/python/cpython/pull/119891 + if sys.version_info >= (3, 14): + def __annotate__(format): + annos = {} + for base in bases: + if base is Generic: + continue + base_annotate = base.__annotate__ + if base_annotate is None: + continue + base_annos = annotationlib.call_annotate_function( + base_annotate, format, owner=base) + annos.update(base_annos) + if own_annotate is not None: + own = annotationlib.call_annotate_function( + own_annotate, format, owner=tp_dict) + if format != Format.STRING: + own = { + n: typing._type_check(tp, msg, module=tp_dict.__module__) + for n, tp in own.items() + } + elif format == Format.STRING: + own = annotationlib.annotations_to_string(own_annotations) + elif format in (Format.FORWARDREF, Format.VALUE): + own = own_checked_annotations + else: + raise NotImplementedError(format) + annos.update(own) + return annos + + tp_dict.__annotate__ = __annotate__ + else: + tp_dict.__annotations__ = annotations + tp_dict.__required_keys__ = frozenset(required_keys) + tp_dict.__optional_keys__ = frozenset(optional_keys) + tp_dict.__readonly_keys__ = frozenset(readonly_keys) + tp_dict.__mutable_keys__ = frozenset(mutable_keys) + tp_dict.__total__ = total + tp_dict.__closed__ = closed + tp_dict.__extra_items__ = extra_items_type + return tp_dict + + __call__ = dict # static method + + def __subclasscheck__(cls, other): + # Typed dicts are only for static structural subtyping. + raise TypeError('TypedDict does not support instance and class checks') + + __instancecheck__ = __subclasscheck__ + + _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {}) + + def _create_typeddict( + typename, + fields, + /, + *, + typing_is_inline, + total, + closed, + extra_items, + **kwargs, + ): + if fields is _marker or fields is None: + if fields is _marker: + deprecated_thing = ( + "Failing to pass a value for the 'fields' parameter" + ) + else: + deprecated_thing = "Passing `None` as the 'fields' parameter" + + example = f"`{typename} = TypedDict({typename!r}, {{}})`" + deprecation_msg = ( + f"{deprecated_thing} is deprecated and will be disallowed in " + "Python 3.15. To create a TypedDict class with 0 fields " + "using the functional syntax, pass an empty dictionary, e.g. " + ) + example + "." 
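+            # Editor's note: this warning covers legacy zero-field spellings
+            # such as `TD = TypedDict("TD")` or `TD = TypedDict("TD", None)`;
+            # the supported form is `TD = TypedDict("TD", {})`.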
+ warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2) + # Support a field called "closed" + if closed is not False and closed is not True and closed is not None: + kwargs["closed"] = closed + closed = None + # Or "extra_items" + if extra_items is not NoExtraItems: + kwargs["extra_items"] = extra_items + extra_items = NoExtraItems + fields = kwargs + elif kwargs: + raise TypeError("TypedDict takes either a dict or keyword arguments," + " but not both") + if kwargs: + # Breakpoint: https://github.com/python/cpython/pull/104891 + if sys.version_info >= (3, 13): + raise TypeError("TypedDict takes no keyword arguments") + warnings.warn( + "The kwargs-based syntax for TypedDict definitions is deprecated " + "in Python 3.11, will be removed in Python 3.13, and may not be " + "understood by third-party type checkers.", + DeprecationWarning, + stacklevel=2, + ) + + ns = {'__annotations__': dict(fields)} + module = _caller(depth=4 if typing_is_inline else 2) + if module is not None: + # Setting correct module is necessary to make typed dict classes + # pickleable. + ns['__module__'] = module + + td = _TypedDictMeta(typename, (), ns, total=total, closed=closed, + extra_items=extra_items) + td.__orig_bases__ = (TypedDict,) + return td + + class _TypedDictSpecialForm(_SpecialForm, _root=True): + def __call__( + self, + typename, + fields=_marker, + /, + *, + total=True, + closed=None, + extra_items=NoExtraItems, + **kwargs + ): + return _create_typeddict( + typename, + fields, + typing_is_inline=False, + total=total, + closed=closed, + extra_items=extra_items, + **kwargs, + ) + + def __mro_entries__(self, bases): + return (_TypedDict,) + + @_TypedDictSpecialForm + def TypedDict(self, args): + """A simple typed namespace. At runtime it is equivalent to a plain dict. + + TypedDict creates a dictionary type such that a type checker will expect all + instances to have a certain set of keys, where each key is + associated with a value of a consistent type. This expectation + is not checked at runtime. + + Usage:: + + class Point2D(TypedDict): + x: int + y: int + label: str + + a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK + b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check + + assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first') + + The type info can be accessed via the Point2D.__annotations__ dict, and + the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets. + TypedDict supports an additional equivalent form:: + + Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) + + By default, all keys must be present in a TypedDict. It is possible + to override this by specifying totality:: + + class Point2D(TypedDict, total=False): + x: int + y: int + + This means that a Point2D TypedDict can have any of the keys omitted. A type + checker is only expected to support a literal False or True as the value of + the total argument. True is the default, and makes all items defined in the + class body be required. + + The Required and NotRequired special forms can also be used to mark + individual keys as being required or not required:: + + class Point2D(TypedDict): + x: int # the "x" key must always be present (Required is the default) + y: NotRequired[int] # the "y" key can be omitted + + See PEP 655 for more details on Required and NotRequired. + """ + # This runs when creating inline TypedDicts: + if not isinstance(args, dict): + raise TypeError( + "TypedDict[...] 
should be used with a single dict argument"
+            )
+
+        return _create_typeddict(
+            "<inline TypedDict>",
+            args,
+            typing_is_inline=True,
+            total=True,
+            closed=True,
+            extra_items=NoExtraItems,
+        )
+
+    _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta)
+
+    def is_typeddict(tp):
+        """Check if an annotation is a TypedDict class
+
+        For example::
+            class Film(TypedDict):
+                title: str
+                year: int
+
+            is_typeddict(Film)  # => True
+            is_typeddict(Union[list, str])  # => False
+        """
+        return isinstance(tp, _TYPEDDICT_TYPES)
+
+
+if hasattr(typing, "assert_type"):
+    assert_type = typing.assert_type
+
+else:
+    def assert_type(val, typ, /):
+        """Assert (to the type checker) that the value is of the given type.
+
+        When the type checker encounters a call to assert_type(), it
+        emits an error if the value is not of the specified type::
+
+            def greet(name: str) -> None:
+                assert_type(name, str)  # ok
+                assert_type(name, int)  # type checker error
+
+        At runtime this returns the first argument unchanged and otherwise
+        does nothing.
+        """
+        return val
+
+
+if hasattr(typing, "ReadOnly"):  # 3.13+
+    get_type_hints = typing.get_type_hints
+else:  # <=3.12
+    # replaces _strip_annotations()
+    def _strip_extras(t):
+        """Strips Annotated, Required and NotRequired from a given type."""
+        if isinstance(t, typing._AnnotatedAlias):
+            return _strip_extras(t.__origin__)
+        if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired, ReadOnly):
+            return _strip_extras(t.__args__[0])
+        if isinstance(t, typing._GenericAlias):
+            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
+            if stripped_args == t.__args__:
+                return t
+            return t.copy_with(stripped_args)
+        if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias):
+            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
+            if stripped_args == t.__args__:
+                return t
+            return _types.GenericAlias(t.__origin__, stripped_args)
+        if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType):
+            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
+            if stripped_args == t.__args__:
+                return t
+            return functools.reduce(operator.or_, stripped_args)
+
+        return t
+
+    def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
+        """Return type hints for an object.
+
+        This is often the same as obj.__annotations__, but it handles
+        forward references encoded as string literals, adds Optional[t] if a
+        default value equal to None is set and recursively replaces all
+        'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T'
+        (unless 'include_extras=True').
+
+        The argument may be a module, class, method, or function. The annotations
+        are returned as a dictionary. For classes, annotations include also
+        inherited members.
+
+        TypeError is raised if the argument is not of a type that can contain
+        annotations, and an empty dictionary is returned if no annotations are
+        present.
+
+        BEWARE -- the behavior of globalns and localns is counterintuitive
+        (unless you are familiar with how eval() and exec() work). The
+        search order is locals first, then globals.
+
+        - If no dict arguments are passed, an attempt is made to use the
+          globals from obj (or the respective module's globals for classes),
+          and these are also used as the locals. If the object does not appear
+          to have globals, an empty dictionary is used.
+
+        - If one dict argument is passed, it is used for both globals and
+          locals.
+
+        - If two dict arguments are passed, they specify globals and
+          locals, respectively.
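+
+        Example (editor's illustrative addition)::
+
+            class Movie(TypedDict):
+                year: NotRequired[int]
+
+            get_type_hints(Movie)                       # {'year': int}
+            get_type_hints(Movie, include_extras=True)  # {'year': NotRequired[int]}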
+ """ + hint = typing.get_type_hints( + obj, globalns=globalns, localns=localns, include_extras=True + ) + # Breakpoint: https://github.com/python/cpython/pull/30304 + if sys.version_info < (3, 11): + _clean_optional(obj, hint, globalns, localns) + if include_extras: + return hint + return {k: _strip_extras(t) for k, t in hint.items()} + + _NoneType = type(None) + + def _could_be_inserted_optional(t): + """detects Union[..., None] pattern""" + if not isinstance(t, typing._UnionGenericAlias): + return False + # Assume if last argument is not None they are user defined + if t.__args__[-1] is not _NoneType: + return False + return True + + # < 3.11 + def _clean_optional(obj, hints, globalns=None, localns=None): + # reverts injected Union[..., None] cases from typing.get_type_hints + # when a None default value is used. + # see https://github.com/python/typing_extensions/issues/310 + if not hints or isinstance(obj, type): + return + defaults = typing._get_defaults(obj) # avoid accessing __annotations___ + if not defaults: + return + original_hints = obj.__annotations__ + for name, value in hints.items(): + # Not a Union[..., None] or replacement conditions not fullfilled + if (not _could_be_inserted_optional(value) + or name not in defaults + or defaults[name] is not None + ): + continue + original_value = original_hints[name] + # value=NoneType should have caused a skip above but check for safety + if original_value is None: + original_value = _NoneType + # Forward reference + if isinstance(original_value, str): + if globalns is None: + if isinstance(obj, _types.ModuleType): + globalns = obj.__dict__ + else: + nsobj = obj + # Find globalns for the unwrapped object. + while hasattr(nsobj, '__wrapped__'): + nsobj = nsobj.__wrapped__ + globalns = getattr(nsobj, '__globals__', {}) + if localns is None: + localns = globalns + elif localns is None: + localns = globalns + + original_value = ForwardRef( + original_value, + is_argument=not isinstance(obj, _types.ModuleType) + ) + original_evaluated = typing._eval_type(original_value, globalns, localns) + # Compare if values differ. Note that even if equal + # value might be cached by typing._tp_cache contrary to original_evaluated + if original_evaluated != value or ( + # 3.10: ForwardRefs of UnionType might be turned into _UnionGenericAlias + hasattr(_types, "UnionType") + and isinstance(original_evaluated, _types.UnionType) + and not isinstance(value, _types.UnionType) + ): + hints[name] = original_evaluated + +# Python 3.9 has get_origin() and get_args() but those implementations don't support +# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do. +# Breakpoint: https://github.com/python/cpython/pull/25298 +if sys.version_info >= (3, 10): + get_origin = typing.get_origin + get_args = typing.get_args +# 3.9 +else: + def get_origin(tp): + """Get the unsubscripted version of a type. + + This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar + and Annotated. Return None for unsupported types. 
Examples:: + + get_origin(Literal[42]) is Literal + get_origin(int) is None + get_origin(ClassVar[int]) is ClassVar + get_origin(Generic) is Generic + get_origin(Generic[T]) is Generic + get_origin(Union[T, int]) is Union + get_origin(List[Tuple[T, T]][int]) == list + get_origin(P.args) is P + """ + if isinstance(tp, typing._AnnotatedAlias): + return Annotated + if isinstance(tp, (typing._BaseGenericAlias, _types.GenericAlias, + ParamSpecArgs, ParamSpecKwargs)): + return tp.__origin__ + if tp is typing.Generic: + return typing.Generic + return None + + def get_args(tp): + """Get type arguments with all substitutions performed. + + For unions, basic simplifications used by Union constructor are performed. + Examples:: + get_args(Dict[str, int]) == (str, int) + get_args(int) == () + get_args(Union[int, Union[T, int], str][int]) == (int, str) + get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) + get_args(Callable[[], T][int]) == ([], int) + """ + if isinstance(tp, typing._AnnotatedAlias): + return (tp.__origin__, *tp.__metadata__) + if isinstance(tp, (typing._GenericAlias, _types.GenericAlias)): + res = tp.__args__ + if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis: + res = (list(res[:-1]), res[-1]) + return res + return () + + +# 3.10+ +if hasattr(typing, 'TypeAlias'): + TypeAlias = typing.TypeAlias +# 3.9 +else: + @_ExtensionsSpecialForm + def TypeAlias(self, parameters): + """Special marker indicating that an assignment should + be recognized as a proper type alias definition by type + checkers. + + For example:: + + Predicate: TypeAlias = Callable[..., bool] + + It's invalid when used anywhere except as in the example above. + """ + raise TypeError(f"{self} is not subscriptable") + + +def _set_default(type_param, default): + type_param.has_default = lambda: default is not NoDefault + type_param.__default__ = default + + +def _set_module(typevarlike): + # for pickling: + def_mod = _caller(depth=2) + if def_mod != 'typing_extensions': + typevarlike.__module__ = def_mod + + +class _DefaultMixin: + """Mixin for TypeVarLike defaults.""" + + __slots__ = () + __init__ = _set_default + + +# Classes using this metaclass must provide a _backported_typevarlike ClassVar +class _TypeVarLikeMeta(type): + def __instancecheck__(cls, __instance: Any) -> bool: + return isinstance(__instance, cls._backported_typevarlike) + + +if _PEP_696_IMPLEMENTED: + from typing import TypeVar +else: + # Add default and infer_variance parameters from PEP 696 and 695 + class TypeVar(metaclass=_TypeVarLikeMeta): + """Type variable.""" + + _backported_typevarlike = typing.TypeVar + + def __new__(cls, name, *constraints, bound=None, + covariant=False, contravariant=False, + default=NoDefault, infer_variance=False): + if hasattr(typing, "TypeAliasType"): + # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar + typevar = typing.TypeVar(name, *constraints, bound=bound, + covariant=covariant, contravariant=contravariant, + infer_variance=infer_variance) + else: + typevar = typing.TypeVar(name, *constraints, bound=bound, + covariant=covariant, contravariant=contravariant) + if infer_variance and (covariant or contravariant): + raise ValueError("Variance cannot be specified with infer_variance.") + typevar.__infer_variance__ = infer_variance + + _set_default(typevar, default) + _set_module(typevar) + + def _tvar_prepare_subst(alias, args): + if ( + typevar.has_default() + and alias.__parameters__.index(typevar) == len(args) + ): + args += (typevar.__default__,) + return 
args + + typevar.__typing_prepare_subst__ = _tvar_prepare_subst + return typevar + + def __init_subclass__(cls) -> None: + raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type") + + +# Python 3.10+ has PEP 612 +if hasattr(typing, 'ParamSpecArgs'): + ParamSpecArgs = typing.ParamSpecArgs + ParamSpecKwargs = typing.ParamSpecKwargs +# 3.9 +else: + class _Immutable: + """Mixin to indicate that object should not be copied.""" + __slots__ = () + + def __copy__(self): + return self + + def __deepcopy__(self, memo): + return self + + class ParamSpecArgs(_Immutable): + """The args for a ParamSpec object. + + Given a ParamSpec object P, P.args is an instance of ParamSpecArgs. + + ParamSpecArgs objects have a reference back to their ParamSpec: + + P.args.__origin__ is P + + This type is meant for runtime introspection and has no special meaning to + static type checkers. + """ + def __init__(self, origin): + self.__origin__ = origin + + def __repr__(self): + return f"{self.__origin__.__name__}.args" + + def __eq__(self, other): + if not isinstance(other, ParamSpecArgs): + return NotImplemented + return self.__origin__ == other.__origin__ + + class ParamSpecKwargs(_Immutable): + """The kwargs for a ParamSpec object. + + Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs. + + ParamSpecKwargs objects have a reference back to their ParamSpec: + + P.kwargs.__origin__ is P + + This type is meant for runtime introspection and has no special meaning to + static type checkers. + """ + def __init__(self, origin): + self.__origin__ = origin + + def __repr__(self): + return f"{self.__origin__.__name__}.kwargs" + + def __eq__(self, other): + if not isinstance(other, ParamSpecKwargs): + return NotImplemented + return self.__origin__ == other.__origin__ + + +if _PEP_696_IMPLEMENTED: + from typing import ParamSpec + +# 3.10+ +elif hasattr(typing, 'ParamSpec'): + + # Add default parameter - PEP 696 + class ParamSpec(metaclass=_TypeVarLikeMeta): + """Parameter specification.""" + + _backported_typevarlike = typing.ParamSpec + + def __new__(cls, name, *, bound=None, + covariant=False, contravariant=False, + infer_variance=False, default=NoDefault): + if hasattr(typing, "TypeAliasType"): + # PEP 695 implemented, can pass infer_variance to typing.TypeVar + paramspec = typing.ParamSpec(name, bound=bound, + covariant=covariant, + contravariant=contravariant, + infer_variance=infer_variance) + else: + paramspec = typing.ParamSpec(name, bound=bound, + covariant=covariant, + contravariant=contravariant) + paramspec.__infer_variance__ = infer_variance + + _set_default(paramspec, default) + _set_module(paramspec) + + def _paramspec_prepare_subst(alias, args): + params = alias.__parameters__ + i = params.index(paramspec) + if i == len(args) and paramspec.has_default(): + args = [*args, paramspec.__default__] + if i >= len(args): + raise TypeError(f"Too few arguments for {alias}") + # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612. + if len(params) == 1 and not typing._is_param_expr(args[0]): + assert i == 0 + args = (args,) + # Convert lists to tuples to help other libraries cache the results. 
+                elif isinstance(args[i], list):
+                    args = (*args[:i], tuple(args[i]), *args[i + 1:])
+                return args
+
+            paramspec.__typing_prepare_subst__ = _paramspec_prepare_subst
+            return paramspec
+
+        def __init_subclass__(cls) -> None:
+            raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type")
+
+# 3.9
+else:
+
+    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
+    class ParamSpec(list, _DefaultMixin):
+        """Parameter specification variable.
+
+        Usage::
+
+           P = ParamSpec('P')
+
+        Parameter specification variables exist primarily for the benefit of static
+        type checkers. They are used to forward the parameter types of one
+        callable to another callable, a pattern commonly found in higher order
+        functions and decorators. They are only valid when used in ``Concatenate``,
+        or as the first argument to ``Callable``. In Python 3.10 and higher,
+        they are also supported in user-defined Generics at runtime.
+        See class Generic for more information on generic types. An
+        example for annotating a decorator::
+
+           T = TypeVar('T')
+           P = ParamSpec('P')
+
+           def add_logging(f: Callable[P, T]) -> Callable[P, T]:
+               '''A type-safe decorator to add logging to a function.'''
+               def inner(*args: P.args, **kwargs: P.kwargs) -> T:
+                   logging.info(f'{f.__name__} was called')
+                   return f(*args, **kwargs)
+               return inner
+
+           @add_logging
+           def add_two(x: float, y: float) -> float:
+               '''Add two numbers together.'''
+               return x + y
+
+        Parameter specification variables defined with covariant=True or
+        contravariant=True can be used to declare covariant or contravariant
+        generic types. These keyword arguments are valid, but their actual semantics
+        are yet to be decided. See PEP 612 for details.
+
+        Parameter specification variables can be introspected. e.g.:
+
+           P.__name__ == 'P'
+           P.__bound__ == None
+           P.__covariant__ == False
+           P.__contravariant__ == False
+
+        Note that only parameter specification variables defined in global scope can
+        be pickled.
+        """
+
+        # Trick Generic __parameters__.
+        __class__ = typing.TypeVar
+
+        @property
+        def args(self):
+            return ParamSpecArgs(self)
+
+        @property
+        def kwargs(self):
+            return ParamSpecKwargs(self)
+
+        def __init__(self, name, *, bound=None, covariant=False, contravariant=False,
+                     infer_variance=False, default=NoDefault):
+            list.__init__(self, [self])
+            self.__name__ = name
+            self.__covariant__ = bool(covariant)
+            self.__contravariant__ = bool(contravariant)
+            self.__infer_variance__ = bool(infer_variance)
+            if bound:
+                self.__bound__ = typing._type_check(bound, 'Bound must be a type.')
+            else:
+                self.__bound__ = None
+            _DefaultMixin.__init__(self, default)
+
+            # for pickling:
+            def_mod = _caller()
+            if def_mod != 'typing_extensions':
+                self.__module__ = def_mod
+
+        def __repr__(self):
+            if self.__infer_variance__:
+                prefix = ''
+            elif self.__covariant__:
+                prefix = '+'
+            elif self.__contravariant__:
+                prefix = '-'
+            else:
+                prefix = '~'
+            return prefix + self.__name__
+
+        def __hash__(self):
+            return object.__hash__(self)
+
+        def __eq__(self, other):
+            return self is other
+
+        def __reduce__(self):
+            return self.__name__
+
+        # Hack to get typing._type_check to pass.
+        def __call__(self, *args, **kwargs):
+            pass
+
+
+# 3.9
+if not hasattr(typing, 'Concatenate'):
+    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
+ + # 3.9.0-1 + if not hasattr(typing, '_type_convert'): + def _type_convert(arg, module=None, *, allow_special_forms=False): + """For converting None to type(None), and strings to ForwardRef.""" + if arg is None: + return type(None) + if isinstance(arg, str): + if sys.version_info <= (3, 9, 6): + return ForwardRef(arg) + if sys.version_info <= (3, 9, 7): + return ForwardRef(arg, module=module) + return ForwardRef(arg, module=module, is_class=allow_special_forms) + return arg + else: + _type_convert = typing._type_convert + + class _ConcatenateGenericAlias(list): + + # Trick Generic into looking into this for __parameters__. + __class__ = typing._GenericAlias + + def __init__(self, origin, args): + super().__init__(args) + self.__origin__ = origin + self.__args__ = args + + def __repr__(self): + _type_repr = typing._type_repr + return (f'{_type_repr(self.__origin__)}' + f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]') + + def __hash__(self): + return hash((self.__origin__, self.__args__)) + + # Hack to get typing._type_check to pass in Generic. + def __call__(self, *args, **kwargs): + pass + + @property + def __parameters__(self): + return tuple( + tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec)) + ) + + # 3.9 used by __getitem__ below + def copy_with(self, params): + if isinstance(params[-1], _ConcatenateGenericAlias): + params = (*params[:-1], *params[-1].__args__) + elif isinstance(params[-1], (list, tuple)): + return (*params[:-1], *params[-1]) + elif (not (params[-1] is ... or isinstance(params[-1], ParamSpec))): + raise TypeError("The last parameter to Concatenate should be a " + "ParamSpec variable or ellipsis.") + return self.__class__(self.__origin__, params) + + # 3.9; accessed during GenericAlias.__getitem__ when substituting + def __getitem__(self, args): + if self.__origin__ in (Generic, Protocol): + # Can't subscript Generic[...] or Protocol[...]. 
+ raise TypeError(f"Cannot subscript already-subscripted {self}") + if not self.__parameters__: + raise TypeError(f"{self} is not a generic class") + + if not isinstance(args, tuple): + args = (args,) + args = _unpack_args(*(_type_convert(p) for p in args)) + params = self.__parameters__ + for param in params: + prepare = getattr(param, "__typing_prepare_subst__", None) + if prepare is not None: + args = prepare(self, args) + # 3.9 & typing.ParamSpec + elif isinstance(param, ParamSpec): + i = params.index(param) + if ( + i == len(args) + and getattr(param, '__default__', NoDefault) is not NoDefault + ): + args = [*args, param.__default__] + if i >= len(args): + raise TypeError(f"Too few arguments for {self}") + # Special case for Z[[int, str, bool]] == Z[int, str, bool] + if len(params) == 1 and not _is_param_expr(args[0]): + assert i == 0 + args = (args,) + elif ( + isinstance(args[i], list) + # 3.9 + # This class inherits from list do not convert + and not isinstance(args[i], _ConcatenateGenericAlias) + ): + args = (*args[:i], tuple(args[i]), *args[i + 1:]) + + alen = len(args) + plen = len(params) + if alen != plen: + raise TypeError( + f"Too {'many' if alen > plen else 'few'} arguments for {self};" + f" actual {alen}, expected {plen}" + ) + + subst = dict(zip(self.__parameters__, args)) + # determine new args + new_args = [] + for arg in self.__args__: + if isinstance(arg, type): + new_args.append(arg) + continue + if isinstance(arg, TypeVar): + arg = subst[arg] + if ( + (isinstance(arg, typing._GenericAlias) and _is_unpack(arg)) + or ( + hasattr(_types, "GenericAlias") + and isinstance(arg, _types.GenericAlias) + and getattr(arg, "__unpacked__", False) + ) + ): + raise TypeError(f"{arg} is not valid as type argument") + + elif isinstance(arg, + typing._GenericAlias + if not hasattr(_types, "GenericAlias") else + (typing._GenericAlias, _types.GenericAlias) + ): + subparams = arg.__parameters__ + if subparams: + subargs = tuple(subst[x] for x in subparams) + arg = arg[subargs] + new_args.append(arg) + return self.copy_with(tuple(new_args)) + +# 3.10+ +else: + _ConcatenateGenericAlias = typing._ConcatenateGenericAlias + + # 3.10 + if sys.version_info < (3, 11): + + class _ConcatenateGenericAlias(typing._ConcatenateGenericAlias, _root=True): + # needed for checks in collections.abc.Callable to accept this class + __module__ = "typing" + + def copy_with(self, params): + if isinstance(params[-1], (list, tuple)): + return (*params[:-1], *params[-1]) + if isinstance(params[-1], typing._ConcatenateGenericAlias): + params = (*params[:-1], *params[-1].__args__) + elif not (params[-1] is ... or isinstance(params[-1], ParamSpec)): + raise TypeError("The last parameter to Concatenate should be a " + "ParamSpec variable or ellipsis.") + return super(typing._ConcatenateGenericAlias, self).copy_with(params) + + def __getitem__(self, args): + value = super().__getitem__(args) + if isinstance(value, tuple) and any(_is_unpack(t) for t in value): + return tuple(_unpack_args(*(n for n in value))) + return value + + +# 3.9.2 +class _EllipsisDummy: ... + + +# <=3.10 +def _create_concatenate_alias(origin, parameters): + if parameters[-1] is ... and sys.version_info < (3, 9, 2): + # Hack: Arguments must be types, replace it with one. 
+ parameters = (*parameters[:-1], _EllipsisDummy) + if sys.version_info >= (3, 10, 3): + concatenate = _ConcatenateGenericAlias(origin, parameters, + _typevar_types=(TypeVar, ParamSpec), + _paramspec_tvars=True) + else: + concatenate = _ConcatenateGenericAlias(origin, parameters) + if parameters[-1] is not _EllipsisDummy: + return concatenate + # Remove dummy again + concatenate.__args__ = tuple(p if p is not _EllipsisDummy else ... + for p in concatenate.__args__) + if sys.version_info < (3, 10): + # backport needs __args__ adjustment only + return concatenate + concatenate.__parameters__ = tuple(p for p in concatenate.__parameters__ + if p is not _EllipsisDummy) + return concatenate + + +# <=3.10 +@typing._tp_cache +def _concatenate_getitem(self, parameters): + if parameters == (): + raise TypeError("Cannot take a Concatenate of no types.") + if not isinstance(parameters, tuple): + parameters = (parameters,) + if not (parameters[-1] is ... or isinstance(parameters[-1], ParamSpec)): + raise TypeError("The last parameter to Concatenate should be a " + "ParamSpec variable or ellipsis.") + msg = "Concatenate[arg, ...]: each arg must be a type." + parameters = (*(typing._type_check(p, msg) for p in parameters[:-1]), + parameters[-1]) + return _create_concatenate_alias(self, parameters) + + +# 3.11+; Concatenate does not accept ellipsis in 3.10 +# Breakpoint: https://github.com/python/cpython/pull/30969 +if sys.version_info >= (3, 11): + Concatenate = typing.Concatenate +# <=3.10 +else: + @_ExtensionsSpecialForm + def Concatenate(self, parameters): + """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a + higher order function which adds, removes or transforms parameters of a + callable. + + For example:: + + Callable[Concatenate[int, P], int] + + See PEP 612 for detailed information. + """ + return _concatenate_getitem(self, parameters) + + +# 3.10+ +if hasattr(typing, 'TypeGuard'): + TypeGuard = typing.TypeGuard +# 3.9 +else: + @_ExtensionsSpecialForm + def TypeGuard(self, parameters): + """Special typing form used to annotate the return type of a user-defined + type guard function. ``TypeGuard`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeGuard[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeGuard`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the type inside ``TypeGuard``. + + For example:: + + def is_str(val: Union[str, float]): + # "isinstance" type guard + if isinstance(val, str): + # Type of ``val`` is narrowed to ``str`` + ... + else: + # Else, type of ``val`` is narrowed to ``float``. + ... + + Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower + form of ``TypeA`` (it can even be a wider form) and this may lead to + type-unsafe results. 
The main reason is to allow for things like + narrowing ``List[object]`` to ``List[str]`` even though the latter is not + a subtype of the former, since ``List`` is invariant. The responsibility of + writing type-safe type guards is left to the user. + + ``TypeGuard`` also works with type variables. For more information, see + PEP 647 (User-Defined Type Guards). + """ + item = typing._type_check(parameters, f'{self} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + +# 3.13+ +if hasattr(typing, 'TypeIs'): + TypeIs = typing.TypeIs +# <=3.12 +else: + @_ExtensionsSpecialForm + def TypeIs(self, parameters): + """Special typing form used to annotate the return type of a user-defined + type narrower function. ``TypeIs`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeIs[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeIs`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the intersection of the type inside ``TypeIs`` and the argument's + previously known type. + + For example:: + + def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]: + return hasattr(val, '__await__') + + def f(val: Union[int, Awaitable[int]]) -> int: + if is_awaitable(val): + assert_type(val, Awaitable[int]) + else: + assert_type(val, int) + + ``TypeIs`` also works with type variables. For more information, see + PEP 742 (Narrowing types with TypeIs). + """ + item = typing._type_check(parameters, f'{self} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + +# 3.14+? +if hasattr(typing, 'TypeForm'): + TypeForm = typing.TypeForm +# <=3.13 +else: + class _TypeFormForm(_ExtensionsSpecialForm, _root=True): + # TypeForm(X) is equivalent to X but indicates to the type checker + # that the object is a TypeForm. + def __call__(self, obj, /): + return obj + + @_TypeFormForm + def TypeForm(self, parameters): + """A special form representing the value that results from the evaluation + of a type expression. This value encodes the information supplied in the + type expression, and it represents the type described by that type expression. + + When used in a type expression, TypeForm describes a set of type form objects. + It accepts a single type argument, which must be a valid type expression. + ``TypeForm[T]`` describes the set of all type form objects that represent + the type T or types that are assignable to T. + + Usage: + + def cast[T](typ: TypeForm[T], value: Any) -> T: ... + + reveal_type(cast(int, "x")) # int + + See PEP 747 for more information. + """ + item = typing._type_check(parameters, f'{self} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + + + +if hasattr(typing, "LiteralString"): # 3.11+ + LiteralString = typing.LiteralString +else: + @_SpecialForm + def LiteralString(self, params): + """Represents an arbitrary literal string. 
+ + Example:: + + from typing_extensions import LiteralString + + def query(sql: LiteralString) -> ...: + ... + + query("SELECT * FROM table") # ok + query(f"SELECT * FROM {input()}") # not ok + + See PEP 675 for details. + + """ + raise TypeError(f"{self} is not subscriptable") + + +if hasattr(typing, "Self"): # 3.11+ + Self = typing.Self +else: + @_SpecialForm + def Self(self, params): + """Used to spell the type of "self" in classes. + + Example:: + + from typing import Self + + class ReturnsSelf: + def parse(self, data: bytes) -> Self: + ... + return self + + """ + + raise TypeError(f"{self} is not subscriptable") + + +if hasattr(typing, "Never"): # 3.11+ + Never = typing.Never +else: + @_SpecialForm + def Never(self, params): + """The bottom type, a type that has no members. + + This can be used to define a function that should never be + called, or a function that never returns:: + + from typing_extensions import Never + + def never_call_me(arg: Never) -> None: + pass + + def int_or_str(arg: int | str) -> None: + never_call_me(arg) # type checker error + match arg: + case int(): + print("It's an int") + case str(): + print("It's a str") + case _: + never_call_me(arg) # ok, arg is of type Never + + """ + + raise TypeError(f"{self} is not subscriptable") + + +if hasattr(typing, 'Required'): # 3.11+ + Required = typing.Required + NotRequired = typing.NotRequired +else: # <=3.10 + @_ExtensionsSpecialForm + def Required(self, parameters): + """A special typing construct to mark a key of a total=False TypedDict + as required. For example: + + class Movie(TypedDict, total=False): + title: Required[str] + year: int + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. + """ + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + @_ExtensionsSpecialForm + def NotRequired(self, parameters): + """A special typing construct to mark a key of a TypedDict as + potentially missing. For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """ + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + +if hasattr(typing, 'ReadOnly'): + ReadOnly = typing.ReadOnly +else: # <=3.12 + @_ExtensionsSpecialForm + def ReadOnly(self, parameters): + """A special typing construct to mark an item of a TypedDict as read-only. + + For example: + + class Movie(TypedDict): + title: ReadOnly[str] + year: int + + def mutate_movie(m: Movie) -> None: + m["year"] = 1992 # allowed + m["title"] = "The Matrix" # typechecker error + + There is no runtime checking for this property. + """ + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + +_UNPACK_DOC = """\ +Type unpack operator. + +The type unpack operator takes the child types from some container type, +such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For +example: + + # For some generic class `Foo`: + Foo[Unpack[tuple[int, str]]] # Equivalent to Foo[int, str] + + Ts = TypeVarTuple('Ts') + # Specifies that `Bar` is generic in an arbitrary number of types. 
+ # (Think of `Ts` as a tuple of an arbitrary number of individual + # `TypeVar`s, which the `Unpack` is 'pulling out' directly into the + # `Generic[]`.) + class Bar(Generic[Unpack[Ts]]): ... + Bar[int] # Valid + Bar[int, str] # Also valid + +From Python 3.11, this can also be done using the `*` operator: + + Foo[*tuple[int, str]] + class Bar(Generic[*Ts]): ... + +The operator can also be used along with a `TypedDict` to annotate +`**kwargs` in a function signature. For instance: + + class Movie(TypedDict): + name: str + year: int + + # This function expects two keyword arguments - *name* of type `str` and + # *year* of type `int`. + def foo(**kwargs: Unpack[Movie]): ... + +Note that there is only some runtime checking of this operator. Not +everything the runtime allows may be accepted by static type checkers. + +For more information, see PEP 646 and PEP 692. +""" + + +# PEP 692 changed the repr of Unpack[] +# Breakpoint: https://github.com/python/cpython/pull/104048 +if sys.version_info >= (3, 12): + Unpack = typing.Unpack + + def _is_unpack(obj): + return get_origin(obj) is Unpack + +else: # <=3.11 + class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True): + def __init__(self, getitem): + super().__init__(getitem) + self.__doc__ = _UNPACK_DOC + + class _UnpackAlias(typing._GenericAlias, _root=True): + if sys.version_info < (3, 11): + # needed for compatibility with Generic[Unpack[Ts]] + __class__ = typing.TypeVar + + @property + def __typing_unpacked_tuple_args__(self): + assert self.__origin__ is Unpack + assert len(self.__args__) == 1 + arg, = self.__args__ + if isinstance(arg, (typing._GenericAlias, _types.GenericAlias)): + if arg.__origin__ is not tuple: + raise TypeError("Unpack[...] must be used with a tuple type") + return arg.__args__ + return None + + @property + def __typing_is_unpacked_typevartuple__(self): + assert self.__origin__ is Unpack + assert len(self.__args__) == 1 + return isinstance(self.__args__[0], TypeVarTuple) + + def __getitem__(self, args): + if self.__typing_is_unpacked_typevartuple__: + return args + return super().__getitem__(args) + + @_UnpackSpecialForm + def Unpack(self, parameters): + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return _UnpackAlias(self, (item,)) + + def _is_unpack(obj): + return isinstance(obj, _UnpackAlias) + + +def _unpack_args(*args): + newargs = [] + for arg in args: + subargs = getattr(arg, '__typing_unpacked_tuple_args__', None) + if subargs is not None and (not (subargs and subargs[-1] is ...)): + newargs.extend(subargs) + else: + newargs.append(arg) + return newargs + + +if _PEP_696_IMPLEMENTED: + from typing import TypeVarTuple + +elif hasattr(typing, "TypeVarTuple"): # 3.11+ + + # Add default parameter - PEP 696 + class TypeVarTuple(metaclass=_TypeVarLikeMeta): + """Type variable tuple.""" + + _backported_typevarlike = typing.TypeVarTuple + + def __new__(cls, name, *, default=NoDefault): + tvt = typing.TypeVarTuple(name) + _set_default(tvt, default) + _set_module(tvt) + + def _typevartuple_prepare_subst(alias, args): + params = alias.__parameters__ + typevartuple_index = params.index(tvt) + for param in params[typevartuple_index + 1:]: + if isinstance(param, TypeVarTuple): + raise TypeError( + f"More than one TypeVarTuple parameter in {alias}" + ) + + alen = len(args) + plen = len(params) + left = typevartuple_index + right = plen - typevartuple_index - 1 + var_tuple_index = None + fillarg = None + for k, arg in enumerate(args): + if not isinstance(arg, type): + subargs = 
getattr(arg, '__typing_unpacked_tuple_args__', None) + if subargs and len(subargs) == 2 and subargs[-1] is ...: + if var_tuple_index is not None: + raise TypeError( + "More than one unpacked " + "arbitrary-length tuple argument" + ) + var_tuple_index = k + fillarg = subargs[0] + if var_tuple_index is not None: + left = min(left, var_tuple_index) + right = min(right, alen - var_tuple_index - 1) + elif left + right > alen: + raise TypeError(f"Too few arguments for {alias};" + f" actual {alen}, expected at least {plen - 1}") + if left == alen - right and tvt.has_default(): + replacement = _unpack_args(tvt.__default__) + else: + replacement = args[left: alen - right] + + return ( + *args[:left], + *([fillarg] * (typevartuple_index - left)), + replacement, + *([fillarg] * (plen - right - left - typevartuple_index - 1)), + *args[alen - right:], + ) + + tvt.__typing_prepare_subst__ = _typevartuple_prepare_subst + return tvt + + def __init_subclass__(self, *args, **kwds): + raise TypeError("Cannot subclass special typing classes") + +else: # <=3.10 + class TypeVarTuple(_DefaultMixin): + """Type variable tuple. + + Usage:: + + Ts = TypeVarTuple('Ts') + + In the same way that a normal type variable is a stand-in for a single + type such as ``int``, a type variable *tuple* is a stand-in for a *tuple* + type such as ``Tuple[int, str]``. + + Type variable tuples can be used in ``Generic`` declarations. + Consider the following example:: + + class Array(Generic[*Ts]): ... + + The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``, + where ``T1`` and ``T2`` are type variables. To use these type variables + as type parameters of ``Array``, we must *unpack* the type variable tuple using + the star operator: ``*Ts``. The signature of ``Array`` then behaves + as if we had simply written ``class Array(Generic[T1, T2]): ...``. + In contrast to ``Generic[T1, T2]``, however, ``Generic[*Shape]`` allows + us to parameterise the class with an *arbitrary* number of type parameters. + + Type variable tuples can be used anywhere a normal ``TypeVar`` can. + This includes class definitions, as shown above, as well as function + signatures and variable annotations:: + + class Array(Generic[*Ts]): + + def __init__(self, shape: Tuple[*Ts]): + self._shape: Tuple[*Ts] = shape + + def get_shape(self) -> Tuple[*Ts]: + return self._shape + + shape = (Height(480), Width(640)) + x: Array[Height, Width] = Array(shape) + y = abs(x) # Inferred type is Array[Height, Width] + z = x + x # ... is Array[Height, Width] + x.get_shape() # ... is tuple[Height, Width] + + """ + + # Trick Generic __parameters__. + __class__ = typing.TypeVar + + def __iter__(self): + yield self.__unpacked__ + + def __init__(self, name, *, default=NoDefault): + self.__name__ = name + _DefaultMixin.__init__(self, default) + + # for pickling: + def_mod = _caller() + if def_mod != 'typing_extensions': + self.__module__ = def_mod + + self.__unpacked__ = Unpack[self] + + def __repr__(self): + return self.__name__ + + def __hash__(self): + return object.__hash__(self) + + def __eq__(self, other): + return self is other + + def __reduce__(self): + return self.__name__ + + def __init_subclass__(self, *args, **kwds): + if '_root' not in kwds: + raise TypeError("Cannot subclass special typing classes") + + +if hasattr(typing, "reveal_type"): # 3.11+ + reveal_type = typing.reveal_type +else: # <=3.10 + def reveal_type(obj: T, /) -> T: + """Reveal the inferred type of a variable. 
+ + When a static type checker encounters a call to ``reveal_type()``, + it will emit the inferred type of the argument:: + + x: int = 1 + reveal_type(x) + + Running a static type checker (e.g., ``mypy``) on this example + will produce output similar to 'Revealed type is "builtins.int"'. + + At runtime, the function prints the runtime type of the + argument and returns it unchanged. + + """ + print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr) + return obj + + +if hasattr(typing, "_ASSERT_NEVER_REPR_MAX_LENGTH"): # 3.11+ + _ASSERT_NEVER_REPR_MAX_LENGTH = typing._ASSERT_NEVER_REPR_MAX_LENGTH +else: # <=3.10 + _ASSERT_NEVER_REPR_MAX_LENGTH = 100 + + +if hasattr(typing, "assert_never"): # 3.11+ + assert_never = typing.assert_never +else: # <=3.10 + def assert_never(arg: Never, /) -> Never: + """Assert to the type checker that a line of code is unreachable. + + Example:: + + def int_or_str(arg: int | str) -> None: + match arg: + case int(): + print("It's an int") + case str(): + print("It's a str") + case _: + assert_never(arg) + + If a type checker finds that a call to assert_never() is + reachable, it will emit an error. + + At runtime, this throws an exception when called. + + """ + value = repr(arg) + if len(value) > _ASSERT_NEVER_REPR_MAX_LENGTH: + value = value[:_ASSERT_NEVER_REPR_MAX_LENGTH] + '...' + raise AssertionError(f"Expected code to be unreachable, but got: {value}") + + +# dataclass_transform exists in 3.11 but lacks the frozen_default parameter +# Breakpoint: https://github.com/python/cpython/pull/99958 +if sys.version_info >= (3, 12): # 3.12+ + dataclass_transform = typing.dataclass_transform +else: # <=3.11 + def dataclass_transform( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + frozen_default: bool = False, + field_specifiers: typing.Tuple[ + typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]], + ... + ] = (), + **kwargs: typing.Any, + ) -> typing.Callable[[T], T]: + """Decorator that marks a function, class, or metaclass as providing + dataclass-like behavior. + + Example: + + from typing_extensions import dataclass_transform + + _T = TypeVar("_T") + + # Used on a decorator function + @dataclass_transform() + def create_model(cls: type[_T]) -> type[_T]: + ... + return cls + + @create_model + class CustomerModel: + id: int + name: str + + # Used on a base class + @dataclass_transform() + class ModelBase: ... + + class CustomerModel(ModelBase): + id: int + name: str + + # Used on a metaclass + @dataclass_transform() + class ModelMeta(type): ... + + class ModelBase(metaclass=ModelMeta): ... + + class CustomerModel(ModelBase): + id: int + name: str + + Each of the ``CustomerModel`` classes defined in this example will now + behave similarly to a dataclass created with the ``@dataclasses.dataclass`` + decorator. For example, the type checker will synthesize an ``__init__`` + method. + + The arguments to this decorator can be used to customize this behavior: + - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be + True or False if it is omitted by the caller. + - ``order_default`` indicates whether the ``order`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``kw_only_default`` indicates whether the ``kw_only`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``frozen_default`` indicates whether the ``frozen`` parameter is + assumed to be True or False if it is omitted by the caller. 
+ - ``field_specifiers`` specifies a static list of supported classes + or functions that describe fields, similar to ``dataclasses.field()``. + + At runtime, this decorator records its arguments in the + ``__dataclass_transform__`` attribute on the decorated object. + + See PEP 681 for details. + + """ + def decorator(cls_or_fn): + cls_or_fn.__dataclass_transform__ = { + "eq_default": eq_default, + "order_default": order_default, + "kw_only_default": kw_only_default, + "frozen_default": frozen_default, + "field_specifiers": field_specifiers, + "kwargs": kwargs, + } + return cls_or_fn + return decorator + + +if hasattr(typing, "override"): # 3.12+ + override = typing.override +else: # <=3.11 + _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any]) + + def override(arg: _F, /) -> _F: + """Indicate that a method is intended to override a method in a base class. + + Usage: + + class Base: + def method(self) -> None: + pass + + class Child(Base): + @override + def method(self) -> None: + super().method() + + When this decorator is applied to a method, the type checker will + validate that it overrides a method with the same name on a base class. + This helps prevent bugs that may occur when a base class is changed + without an equivalent change to a child class. + + There is no runtime checking of these properties. The decorator + sets the ``__override__`` attribute to ``True`` on the decorated object + to allow runtime introspection. + + See PEP 698 for details. + + """ + try: + arg.__override__ = True + except (AttributeError, TypeError): + # Skip the attribute silently if it is not writable. + # AttributeError happens if the object has __slots__ or a + # read-only property, TypeError if it's a builtin class. + pass + return arg + + +# Python 3.13.3+ contains a fix for the wrapped __new__ +# Breakpoint: https://github.com/python/cpython/pull/132160 +if sys.version_info >= (3, 13, 3): + deprecated = warnings.deprecated +else: + _T = typing.TypeVar("_T") + + class deprecated: + """Indicate that a class, function or overload is deprecated. + + When this decorator is applied to an object, the type checker + will generate a diagnostic on usage of the deprecated object. + + Usage: + + @deprecated("Use B instead") + class A: + pass + + @deprecated("Use g instead") + def f(): + pass + + @overload + @deprecated("int support is deprecated") + def g(x: int) -> int: ... + @overload + def g(x: str) -> int: ... + + The warning specified by *category* will be emitted at runtime + on use of deprecated objects. For functions, that happens on calls; + for classes, on instantiation and on creation of subclasses. + If the *category* is ``None``, no warning is emitted at runtime. + The *stacklevel* determines where the + warning is emitted. If it is ``1`` (the default), the warning + is emitted at the direct caller of the deprecated object; if it + is higher, it is emitted further up the stack. + Static type checker behavior is not affected by the *category* + and *stacklevel* arguments. + + The deprecation message passed to the decorator is saved in the + ``__deprecated__`` attribute on the decorated object. + If applied to an overload, the decorator + must be after the ``@overload`` decorator for the attribute to + exist on the overload as returned by ``get_overloads()``. + + See PEP 702 for details. 
+ + """ + def __init__( + self, + message: str, + /, + *, + category: typing.Optional[typing.Type[Warning]] = DeprecationWarning, + stacklevel: int = 1, + ) -> None: + if not isinstance(message, str): + raise TypeError( + "Expected an object of type str for 'message', not " + f"{type(message).__name__!r}" + ) + self.message = message + self.category = category + self.stacklevel = stacklevel + + def __call__(self, arg: _T, /) -> _T: + # Make sure the inner functions created below don't + # retain a reference to self. + msg = self.message + category = self.category + stacklevel = self.stacklevel + if category is None: + arg.__deprecated__ = msg + return arg + elif isinstance(arg, type): + import functools + from types import MethodType + + original_new = arg.__new__ + + @functools.wraps(original_new) + def __new__(cls, /, *args, **kwargs): + if cls is arg: + warnings.warn(msg, category=category, stacklevel=stacklevel + 1) + if original_new is not object.__new__: + return original_new(cls, *args, **kwargs) + # Mirrors a similar check in object.__new__. + elif cls.__init__ is object.__init__ and (args or kwargs): + raise TypeError(f"{cls.__name__}() takes no arguments") + else: + return original_new(cls) + + arg.__new__ = staticmethod(__new__) + + original_init_subclass = arg.__init_subclass__ + # We need slightly different behavior if __init_subclass__ + # is a bound method (likely if it was implemented in Python) + if isinstance(original_init_subclass, MethodType): + original_init_subclass = original_init_subclass.__func__ + + @functools.wraps(original_init_subclass) + def __init_subclass__(*args, **kwargs): + warnings.warn(msg, category=category, stacklevel=stacklevel + 1) + return original_init_subclass(*args, **kwargs) + + arg.__init_subclass__ = classmethod(__init_subclass__) + # Or otherwise, which likely means it's a builtin such as + # object's implementation of __init_subclass__. + else: + @functools.wraps(original_init_subclass) + def __init_subclass__(*args, **kwargs): + warnings.warn(msg, category=category, stacklevel=stacklevel + 1) + return original_init_subclass(*args, **kwargs) + + arg.__init_subclass__ = __init_subclass__ + + arg.__deprecated__ = __new__.__deprecated__ = msg + __init_subclass__.__deprecated__ = msg + return arg + elif callable(arg): + import asyncio.coroutines + import functools + import inspect + + @functools.wraps(arg) + def wrapper(*args, **kwargs): + warnings.warn(msg, category=category, stacklevel=stacklevel + 1) + return arg(*args, **kwargs) + + if asyncio.coroutines.iscoroutinefunction(arg): + # Breakpoint: https://github.com/python/cpython/pull/99247 + if sys.version_info >= (3, 12): + wrapper = inspect.markcoroutinefunction(wrapper) + else: + wrapper._is_coroutine = asyncio.coroutines._is_coroutine + + arg.__deprecated__ = wrapper.__deprecated__ = msg + return wrapper + else: + raise TypeError( + "@deprecated decorator with non-None category must be applied to " + f"a class or callable, not {arg!r}" + ) + +# Breakpoint: https://github.com/python/cpython/pull/23702 +if sys.version_info < (3, 10): + def _is_param_expr(arg): + return arg is ... or isinstance( + arg, (tuple, list, ParamSpec, _ConcatenateGenericAlias) + ) +else: + def _is_param_expr(arg): + return arg is ... 
or isinstance( + arg, + ( + tuple, + list, + ParamSpec, + _ConcatenateGenericAlias, + typing._ConcatenateGenericAlias, + ), + ) + + +# We have to do some monkey patching to deal with the dual nature of +# Unpack/TypeVarTuple: +# - We want Unpack to be a kind of TypeVar so it gets accepted in +# Generic[Unpack[Ts]] +# - We want it to *not* be treated as a TypeVar for the purposes of +# counting generic parameters, so that when we subscript a generic, +# the runtime doesn't try to substitute the Unpack with the subscripted type. +if not hasattr(typing, "TypeVarTuple"): + def _check_generic(cls, parameters, elen=_marker): + """Check correct count for parameters of a generic cls (internal helper). + + This gives a nice error message in case of count mismatch. + """ + # If substituting a single ParamSpec with multiple arguments + # we do not check the count + if (inspect.isclass(cls) and issubclass(cls, typing.Generic) + and len(cls.__parameters__) == 1 + and isinstance(cls.__parameters__[0], ParamSpec) + and parameters + and not _is_param_expr(parameters[0]) + ): + # Generic modifies parameters variable, but here we cannot do this + return + + if not elen: + raise TypeError(f"{cls} is not a generic class") + if elen is _marker: + if not hasattr(cls, "__parameters__") or not cls.__parameters__: + raise TypeError(f"{cls} is not a generic class") + elen = len(cls.__parameters__) + alen = len(parameters) + if alen != elen: + expect_val = elen + if hasattr(cls, "__parameters__"): + parameters = [p for p in cls.__parameters__ if not _is_unpack(p)] + num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters) + if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples): + return + + # deal with TypeVarLike defaults + # required TypeVarLikes cannot appear after a defaulted one. + if alen < elen: + # since we validate TypeVarLike default in _collect_type_vars + # or _collect_parameters we can safely check parameters[alen] + if ( + getattr(parameters[alen], '__default__', NoDefault) + is not NoDefault + ): + return + + num_default_tv = sum(getattr(p, '__default__', NoDefault) + is not NoDefault for p in parameters) + + elen -= num_default_tv + + expect_val = f"at least {elen}" + + # Breakpoint: https://github.com/python/cpython/pull/27515 + things = "arguments" if sys.version_info >= (3, 10) else "parameters" + raise TypeError(f"Too {'many' if alen > elen else 'few'} {things}" + f" for {cls}; actual {alen}, expected {expect_val}") +else: + # Python 3.11+ + + def _check_generic(cls, parameters, elen): + """Check correct count for parameters of a generic cls (internal helper). + + This gives a nice error message in case of count mismatch. + """ + if not elen: + raise TypeError(f"{cls} is not a generic class") + alen = len(parameters) + if alen != elen: + expect_val = elen + if hasattr(cls, "__parameters__"): + parameters = [p for p in cls.__parameters__ if not _is_unpack(p)] + + # deal with TypeVarLike defaults + # required TypeVarLikes cannot appear after a defaulted one. 
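+                # If fewer arguments than parameters were supplied, the call
+                # is still valid as long as every omitted parameter carries a
+                # default; `elen` is reduced accordingly below.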
+ if alen < elen: + # since we validate TypeVarLike default in _collect_type_vars + # or _collect_parameters we can safely check parameters[alen] + if ( + getattr(parameters[alen], '__default__', NoDefault) + is not NoDefault + ): + return + + num_default_tv = sum(getattr(p, '__default__', NoDefault) + is not NoDefault for p in parameters) + + elen -= num_default_tv + + expect_val = f"at least {elen}" + + raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments" + f" for {cls}; actual {alen}, expected {expect_val}") + +if not _PEP_696_IMPLEMENTED: + typing._check_generic = _check_generic + + +def _has_generic_or_protocol_as_origin() -> bool: + try: + frame = sys._getframe(2) + # - Catch AttributeError: not all Python implementations have sys._getframe() + # - Catch ValueError: maybe we're called from an unexpected module + # and the call stack isn't deep enough + except (AttributeError, ValueError): + return False # err on the side of leniency + else: + # If we somehow get invoked from outside typing.py, + # also err on the side of leniency + if frame.f_globals.get("__name__") != "typing": + return False + origin = frame.f_locals.get("origin") + # Cannot use "in" because origin may be an object with a buggy __eq__ that + # throws an error. + return origin is typing.Generic or origin is Protocol or origin is typing.Protocol + + +_TYPEVARTUPLE_TYPES = {TypeVarTuple, getattr(typing, "TypeVarTuple", None)} + + +def _is_unpacked_typevartuple(x) -> bool: + if get_origin(x) is not Unpack: + return False + args = get_args(x) + return ( + bool(args) + and len(args) == 1 + and type(args[0]) in _TYPEVARTUPLE_TYPES + ) + + +# Python 3.11+ _collect_type_vars was renamed to _collect_parameters +if hasattr(typing, '_collect_type_vars'): + def _collect_type_vars(types, typevar_types=None): + """Collect all type variable contained in types in order of + first appearance (lexicographic order). 
For example:: + + _collect_type_vars((T, List[S, T])) == (T, S) + """ + if typevar_types is None: + typevar_types = typing.TypeVar + tvars = [] + + # A required TypeVarLike cannot appear after a TypeVarLike with a default + # if it was a direct call to `Generic[]` or `Protocol[]` + enforce_default_ordering = _has_generic_or_protocol_as_origin() + default_encountered = False + + # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple + type_var_tuple_encountered = False + + for t in types: + if _is_unpacked_typevartuple(t): + type_var_tuple_encountered = True + elif ( + isinstance(t, typevar_types) and not isinstance(t, _UnpackAlias) + and t not in tvars + ): + if enforce_default_ordering: + has_default = getattr(t, '__default__', NoDefault) is not NoDefault + if has_default: + if type_var_tuple_encountered: + raise TypeError('Type parameter with a default' + ' follows TypeVarTuple') + default_encountered = True + elif default_encountered: + raise TypeError(f'Type parameter {t!r} without a default' + ' follows type parameter with a default') + + tvars.append(t) + if _should_collect_from_parameters(t): + tvars.extend([t for t in t.__parameters__ if t not in tvars]) + elif isinstance(t, tuple): + # Collect nested type_vars + # tuple wrapped by _prepare_paramspec_params(cls, params) + for x in t: + for collected in _collect_type_vars([x]): + if collected not in tvars: + tvars.append(collected) + return tuple(tvars) + + typing._collect_type_vars = _collect_type_vars +else: + def _collect_parameters(args): + """Collect all type variables and parameter specifications in args + in order of first appearance (lexicographic order). + + For example:: + + assert _collect_parameters((T, Callable[P, T])) == (T, P) + """ + parameters = [] + + # A required TypeVarLike cannot appear after a TypeVarLike with default + # if it was a direct call to `Generic[]` or `Protocol[]` + enforce_default_ordering = _has_generic_or_protocol_as_origin() + default_encountered = False + + # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple + type_var_tuple_encountered = False + + for t in args: + if isinstance(t, type): + # We don't want __parameters__ descriptor of a bare Python class. + pass + elif isinstance(t, tuple): + # `t` might be a tuple, when `ParamSpec` is substituted with + # `[T, int]`, or `[int, *Ts]`, etc. + for x in t: + for collected in _collect_parameters([x]): + if collected not in parameters: + parameters.append(collected) + elif hasattr(t, '__typing_subst__'): + if t not in parameters: + if enforce_default_ordering: + has_default = ( + getattr(t, '__default__', NoDefault) is not NoDefault + ) + + if type_var_tuple_encountered and has_default: + raise TypeError('Type parameter with a default' + ' follows TypeVarTuple') + + if has_default: + default_encountered = True + elif default_encountered: + raise TypeError(f'Type parameter {t!r} without a default' + ' follows type parameter with a default') + + parameters.append(t) + else: + if _is_unpacked_typevartuple(t): + type_var_tuple_encountered = True + for x in getattr(t, '__parameters__', ()): + if x not in parameters: + parameters.append(x) + + return tuple(parameters) + + if not _PEP_696_IMPLEMENTED: + typing._collect_parameters = _collect_parameters + +# Backport typing.NamedTuple as it exists in Python 3.13. +# In 3.11, the ability to define generic `NamedTuple`s was supported. +# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8. 
+# On 3.12, we added __orig_bases__ to call-based NamedTuples +# On 3.13, we deprecated kwargs-based NamedTuples +# Breakpoint: https://github.com/python/cpython/pull/105609 +if sys.version_info >= (3, 13): + NamedTuple = typing.NamedTuple +else: + def _make_nmtuple(name, types, module, defaults=()): + fields = [n for n, t in types] + annotations = {n: typing._type_check(t, f"field {n} annotation must be a type") + for n, t in types} + nm_tpl = collections.namedtuple(name, fields, + defaults=defaults, module=module) + nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations + return nm_tpl + + _prohibited_namedtuple_fields = typing._prohibited + _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'}) + + class _NamedTupleMeta(type): + def __new__(cls, typename, bases, ns): + assert _NamedTuple in bases + for base in bases: + if base is not _NamedTuple and base is not typing.Generic: + raise TypeError( + 'can only inherit from a NamedTuple type and Generic') + bases = tuple(tuple if base is _NamedTuple else base for base in bases) + if "__annotations__" in ns: + types = ns["__annotations__"] + elif "__annotate__" in ns: + # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated + types = ns["__annotate__"](1) + else: + types = {} + default_names = [] + for field_name in types: + if field_name in ns: + default_names.append(field_name) + elif default_names: + raise TypeError(f"Non-default namedtuple field {field_name} " + f"cannot follow default field" + f"{'s' if len(default_names) > 1 else ''} " + f"{', '.join(default_names)}") + nm_tpl = _make_nmtuple( + typename, types.items(), + defaults=[ns[n] for n in default_names], + module=ns['__module__'] + ) + nm_tpl.__bases__ = bases + if typing.Generic in bases: + if hasattr(typing, '_generic_class_getitem'): # 3.12+ + nm_tpl.__class_getitem__ = classmethod(typing._generic_class_getitem) + else: + class_getitem = typing.Generic.__class_getitem__.__func__ + nm_tpl.__class_getitem__ = classmethod(class_getitem) + # update from user namespace without overriding special namedtuple attributes + for key, val in ns.items(): + if key in _prohibited_namedtuple_fields: + raise AttributeError("Cannot overwrite NamedTuple attribute " + key) + elif key not in _special_namedtuple_fields: + if key not in nm_tpl._fields: + setattr(nm_tpl, key, ns[key]) + try: + set_name = type(val).__set_name__ + except AttributeError: + pass + else: + try: + set_name(val, nm_tpl, key) + except BaseException as e: + msg = ( + f"Error calling __set_name__ on {type(val).__name__!r} " + f"instance {key!r} in {typename!r}" + ) + # BaseException.add_note() existed on py311, + # but the __set_name__ machinery didn't start + # using add_note() until py312. + # Making sure exceptions are raised in the same way + # as in "normal" classes seems most important here. + # Breakpoint: https://github.com/python/cpython/pull/95915 + if sys.version_info >= (3, 12): + e.add_note(msg) + raise + else: + raise RuntimeError(msg) from e + + if typing.Generic in bases: + nm_tpl.__init_subclass__() + return nm_tpl + + _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {}) + + def _namedtuple_mro_entries(bases): + assert NamedTuple in bases + return (_NamedTuple,) + + def NamedTuple(typename, fields=_marker, /, **kwargs): + """Typed version of namedtuple. 
+ + Usage:: + + class Employee(NamedTuple): + name: str + id: int + + This is equivalent to:: + + Employee = collections.namedtuple('Employee', ['name', 'id']) + + The resulting class has an extra __annotations__ attribute, giving a + dict that maps field names to types. (The field names are also in + the _fields attribute, which is part of the namedtuple API.) + An alternative equivalent functional syntax is also accepted:: + + Employee = NamedTuple('Employee', [('name', str), ('id', int)]) + """ + if fields is _marker: + if kwargs: + deprecated_thing = "Creating NamedTuple classes using keyword arguments" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "Use the class-based or functional syntax instead." + ) + else: + deprecated_thing = "Failing to pass a value for the 'fields' parameter" + example = f"`{typename} = NamedTuple({typename!r}, [])`" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "To create a NamedTuple class with 0 fields " + "using the functional syntax, " + "pass an empty list, e.g. " + ) + example + "." + elif fields is None: + if kwargs: + raise TypeError( + "Cannot pass `None` as the 'fields' parameter " + "and also specify fields using keyword arguments" + ) + else: + deprecated_thing = "Passing `None` as the 'fields' parameter" + example = f"`{typename} = NamedTuple({typename!r}, [])`" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "To create a NamedTuple class with 0 fields " + "using the functional syntax, " + "pass an empty list, e.g. " + ) + example + "." + elif kwargs: + raise TypeError("Either list of fields or keywords" + " can be provided to NamedTuple, not both") + if fields is _marker or fields is None: + warnings.warn( + deprecation_msg.format(name=deprecated_thing, remove="3.15"), + DeprecationWarning, + stacklevel=2, + ) + fields = kwargs.items() + nt = _make_nmtuple(typename, fields, module=_caller()) + nt.__orig_bases__ = (NamedTuple,) + return nt + + NamedTuple.__mro_entries__ = _namedtuple_mro_entries + + +if hasattr(collections.abc, "Buffer"): + Buffer = collections.abc.Buffer +else: + class Buffer(abc.ABC): # noqa: B024 + """Base class for classes that implement the buffer protocol. + + The buffer protocol allows Python objects to expose a low-level + memory buffer interface. Before Python 3.12, it is not possible + to implement the buffer protocol in pure Python code, or even + to check whether a class implements the buffer protocol. In + Python 3.12 and higher, the ``__buffer__`` method allows access + to the buffer protocol from Python code, and the + ``collections.abc.Buffer`` ABC allows checking whether a class + implements the buffer protocol. + + To indicate support for the buffer protocol in earlier versions, + inherit from this ABC, either in a stub file or at runtime, + or use ABC registration. This ABC provides no methods, because + there is no Python-accessible methods shared by pre-3.12 buffer + classes. It is useful primarily for static checks. + + """ + + # As a courtesy, register the most common stdlib buffer classes. + Buffer.register(memoryview) + Buffer.register(bytearray) + Buffer.register(bytes) + + +# Backport of types.get_original_bases, available on 3.12+ in CPython +if hasattr(_types, "get_original_bases"): + get_original_bases = _types.get_original_bases +else: + def get_original_bases(cls, /): + """Return the class's "original" bases prior to modification by `__mro_entries__`. 
+ + Examples:: + + from typing import TypeVar, Generic + from typing_extensions import NamedTuple, TypedDict + + T = TypeVar("T") + class Foo(Generic[T]): ... + class Bar(Foo[int], float): ... + class Baz(list[str]): ... + Eggs = NamedTuple("Eggs", [("a", int), ("b", str)]) + Spam = TypedDict("Spam", {"a": int, "b": str}) + + assert get_original_bases(Bar) == (Foo[int], float) + assert get_original_bases(Baz) == (list[str],) + assert get_original_bases(Eggs) == (NamedTuple,) + assert get_original_bases(Spam) == (TypedDict,) + assert get_original_bases(int) == (object,) + """ + try: + return cls.__dict__.get("__orig_bases__", cls.__bases__) + except AttributeError: + raise TypeError( + f'Expected an instance of type, not {type(cls).__name__!r}' + ) from None + + +# NewType is a class on Python 3.10+, making it pickleable +# The error message for subclassing instances of NewType was improved on 3.11+ +# Breakpoint: https://github.com/python/cpython/pull/30268 +if sys.version_info >= (3, 11): + NewType = typing.NewType +else: + class NewType: + """NewType creates simple unique types with almost zero + runtime overhead. NewType(name, tp) is considered a subtype of tp + by static type checkers. At runtime, NewType(name, tp) returns + a dummy callable that simply returns its argument. Usage:: + UserId = NewType('UserId', int) + def name_by_id(user_id: UserId) -> str: + ... + UserId('user') # Fails type check + name_by_id(42) # Fails type check + name_by_id(UserId(42)) # OK + num = UserId(5) + 1 # type: int + """ + + def __call__(self, obj, /): + return obj + + def __init__(self, name, tp): + self.__qualname__ = name + if '.' in name: + name = name.rpartition('.')[-1] + self.__name__ = name + self.__supertype__ = tp + def_mod = _caller() + if def_mod != 'typing_extensions': + self.__module__ = def_mod + + def __mro_entries__(self, bases): + # We defined __mro_entries__ to get a better error message + # if a user attempts to subclass a NewType instance. bpo-46170 + supercls_name = self.__name__ + + class Dummy: + def __init_subclass__(cls): + subcls_name = cls.__name__ + raise TypeError( + f"Cannot subclass an instance of NewType. 
" + f"Perhaps you were looking for: " + f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`" + ) + + return (Dummy,) + + def __repr__(self): + return f'{self.__module__}.{self.__qualname__}' + + def __reduce__(self): + return self.__qualname__ + + # Breakpoint: https://github.com/python/cpython/pull/21515 + if sys.version_info >= (3, 10): + # PEP 604 methods + # It doesn't make sense to have these methods on Python <3.10 + + def __or__(self, other): + return typing.Union[self, other] + + def __ror__(self, other): + return typing.Union[other, self] + + +# Breakpoint: https://github.com/python/cpython/pull/124795 +if sys.version_info >= (3, 14): + TypeAliasType = typing.TypeAliasType +# <=3.13 +else: + # Breakpoint: https://github.com/python/cpython/pull/103764 + if sys.version_info >= (3, 12): + # 3.12-3.13 + def _is_unionable(obj): + """Corresponds to is_unionable() in unionobject.c in CPython.""" + return obj is None or isinstance(obj, ( + type, + _types.GenericAlias, + _types.UnionType, + typing.TypeAliasType, + TypeAliasType, + )) + else: + # <=3.11 + def _is_unionable(obj): + """Corresponds to is_unionable() in unionobject.c in CPython.""" + return obj is None or isinstance(obj, ( + type, + _types.GenericAlias, + _types.UnionType, + TypeAliasType, + )) + + if sys.version_info < (3, 10): + # Copied and pasted from https://github.com/python/cpython/blob/986a4e1b6fcae7fe7a1d0a26aea446107dd58dd2/Objects/genericaliasobject.c#L568-L582, + # so that we emulate the behaviour of `types.GenericAlias` + # on the latest versions of CPython + _ATTRIBUTE_DELEGATION_EXCLUSIONS = frozenset({ + "__class__", + "__bases__", + "__origin__", + "__args__", + "__unpacked__", + "__parameters__", + "__typing_unpacked_tuple_args__", + "__mro_entries__", + "__reduce_ex__", + "__reduce__", + "__copy__", + "__deepcopy__", + }) + + class _TypeAliasGenericAlias(typing._GenericAlias, _root=True): + def __getattr__(self, attr): + if attr in _ATTRIBUTE_DELEGATION_EXCLUSIONS: + return object.__getattr__(self, attr) + return getattr(self.__origin__, attr) + + + class TypeAliasType: + """Create named, parameterized type aliases. + + This provides a backport of the new `type` statement in Python 3.12: + + type ListOrSet[T] = list[T] | set[T] + + is equivalent to: + + T = TypeVar("T") + ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,)) + + The name ListOrSet can then be used as an alias for the type it refers to. + + The type_params argument should contain all the type parameters used + in the value of the type alias. If the alias is not generic, this + argument is omitted. + + Static type checkers should only support type aliases declared using + TypeAliasType that follow these rules: + + - The first argument (the name) must be a string literal. + - The TypeAliasType instance must be immediately assigned to a variable + of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid, + as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)'). 
+ + """ + + def __init__(self, name: str, value, *, type_params=()): + if not isinstance(name, str): + raise TypeError("TypeAliasType name must be a string") + if not isinstance(type_params, tuple): + raise TypeError("type_params must be a tuple") + self.__value__ = value + self.__type_params__ = type_params + + default_value_encountered = False + parameters = [] + for type_param in type_params: + if ( + not isinstance(type_param, (TypeVar, TypeVarTuple, ParamSpec)) + # <=3.11 + # Unpack Backport passes isinstance(type_param, TypeVar) + or _is_unpack(type_param) + ): + raise TypeError(f"Expected a type param, got {type_param!r}") + has_default = ( + getattr(type_param, '__default__', NoDefault) is not NoDefault + ) + if default_value_encountered and not has_default: + raise TypeError(f"non-default type parameter '{type_param!r}'" + " follows default type parameter") + if has_default: + default_value_encountered = True + if isinstance(type_param, TypeVarTuple): + parameters.extend(type_param) + else: + parameters.append(type_param) + self.__parameters__ = tuple(parameters) + def_mod = _caller() + if def_mod != 'typing_extensions': + self.__module__ = def_mod + # Setting this attribute closes the TypeAliasType from further modification + self.__name__ = name + + def __setattr__(self, name: str, value: object, /) -> None: + if hasattr(self, "__name__"): + self._raise_attribute_error(name) + super().__setattr__(name, value) + + def __delattr__(self, name: str, /) -> Never: + self._raise_attribute_error(name) + + def _raise_attribute_error(self, name: str) -> Never: + # Match the Python 3.12 error messages exactly + if name == "__name__": + raise AttributeError("readonly attribute") + elif name in {"__value__", "__type_params__", "__parameters__", "__module__"}: + raise AttributeError( + f"attribute '{name}' of 'typing.TypeAliasType' objects " + "is not writable" + ) + else: + raise AttributeError( + f"'typing.TypeAliasType' object has no attribute '{name}'" + ) + + def __repr__(self) -> str: + return self.__name__ + + if sys.version_info < (3, 11): + def _check_single_param(self, param, recursion=0): + # Allow [], [int], [int, str], [int, ...], [int, T] + if param is ...: + return ... + if param is None: + return None + # Note in <= 3.9 _ConcatenateGenericAlias inherits from list + if isinstance(param, list) and recursion == 0: + return [self._check_single_param(arg, recursion+1) + for arg in param] + return typing._type_check( + param, f'Subscripting {self.__name__} requires a type.' + ) + + def _check_parameters(self, parameters): + if sys.version_info < (3, 11): + return tuple( + self._check_single_param(item) + for item in parameters + ) + return tuple(typing._type_check( + item, f'Subscripting {self.__name__} requires a type.' + ) + for item in parameters + ) + + def __getitem__(self, parameters): + if not self.__type_params__: + raise TypeError("Only generic type aliases are subscriptable") + if not isinstance(parameters, tuple): + parameters = (parameters,) + # Using 3.9 here will create problems with Concatenate + if sys.version_info >= (3, 10): + return _types.GenericAlias(self, parameters) + type_vars = _collect_type_vars(parameters) + parameters = self._check_parameters(parameters) + alias = _TypeAliasGenericAlias(self, parameters) + # alias.__parameters__ is not complete if Concatenate is present + # as it is converted to a list from which no parameters are extracted. 
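+            # Restore the full set from the type variables collected above.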
+ if alias.__parameters__ != type_vars: + alias.__parameters__ = type_vars + return alias + + def __reduce__(self): + return self.__name__ + + def __init_subclass__(cls, *args, **kwargs): + raise TypeError( + "type 'typing_extensions.TypeAliasType' is not an acceptable base type" + ) + + # The presence of this method convinces typing._type_check + # that TypeAliasTypes are types. + def __call__(self): + raise TypeError("Type alias is not callable") + + # Breakpoint: https://github.com/python/cpython/pull/21515 + if sys.version_info >= (3, 10): + def __or__(self, right): + # For forward compatibility with 3.12, reject Unions + # that are not accepted by the built-in Union. + if not _is_unionable(right): + return NotImplemented + return typing.Union[self, right] + + def __ror__(self, left): + if not _is_unionable(left): + return NotImplemented + return typing.Union[left, self] + + +if hasattr(typing, "is_protocol"): + is_protocol = typing.is_protocol + get_protocol_members = typing.get_protocol_members +else: + def is_protocol(tp: type, /) -> bool: + """Return True if the given type is a Protocol. + + Example:: + + >>> from typing_extensions import Protocol, is_protocol + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> is_protocol(P) + True + >>> is_protocol(int) + False + """ + return ( + isinstance(tp, type) + and getattr(tp, '_is_protocol', False) + and tp is not Protocol + and tp is not typing.Protocol + ) + + def get_protocol_members(tp: type, /) -> typing.FrozenSet[str]: + """Return the set of members defined in a Protocol. + + Example:: + + >>> from typing_extensions import Protocol, get_protocol_members + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> get_protocol_members(P) + frozenset({'a', 'b'}) + + Raise a TypeError for arguments that are not Protocols. + """ + if not is_protocol(tp): + raise TypeError(f'{tp!r} is not a Protocol') + if hasattr(tp, '__protocol_attrs__'): + return frozenset(tp.__protocol_attrs__) + return frozenset(_get_protocol_attrs(tp)) + + +if hasattr(typing, "Doc"): + Doc = typing.Doc +else: + class Doc: + """Define the documentation of a type annotation using ``Annotated``, to be + used in class attributes, function and method parameters, return values, + and variables. + + The value should be a positional-only string literal to allow static tools + like editors and documentation generators to use it. + + This complements docstrings. + + The string value passed is available in the attribute ``documentation``. + + Example:: + + >>> from typing_extensions import Annotated, Doc + >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ... 
+ """ + def __init__(self, documentation: str, /) -> None: + self.documentation = documentation + + def __repr__(self) -> str: + return f"Doc({self.documentation!r})" + + def __hash__(self) -> int: + return hash(self.documentation) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Doc): + return NotImplemented + return self.documentation == other.documentation + + +_CapsuleType = getattr(_types, "CapsuleType", None) + +if _CapsuleType is None: + try: + import _socket + except ImportError: + pass + else: + _CAPI = getattr(_socket, "CAPI", None) + if _CAPI is not None: + _CapsuleType = type(_CAPI) + +if _CapsuleType is not None: + CapsuleType = _CapsuleType + __all__.append("CapsuleType") + + +if sys.version_info >= (3, 14): + from annotationlib import Format, get_annotations +else: + # Available since Python 3.14.0a3 + # PR: https://github.com/python/cpython/pull/124415 + class Format(enum.IntEnum): + VALUE = 1 + VALUE_WITH_FAKE_GLOBALS = 2 + FORWARDREF = 3 + STRING = 4 + + # Available since Python 3.14.0a1 + # PR: https://github.com/python/cpython/pull/119891 + def get_annotations(obj, *, globals=None, locals=None, eval_str=False, + format=Format.VALUE): + """Compute the annotations dict for an object. + + obj may be a callable, class, or module. + Passing in an object of any other type raises TypeError. + + Returns a dict. get_annotations() returns a new dict every time + it's called; calling it twice on the same object will return two + different but equivalent dicts. + + This is a backport of `inspect.get_annotations`, which has been + in the standard library since Python 3.10. See the standard library + documentation for more: + + https://docs.python.org/3/library/inspect.html#inspect.get_annotations + + This backport adds the *format* argument introduced by PEP 649. The + three formats supported are: + * VALUE: the annotations are returned as-is. This is the default and + it is compatible with the behavior on previous Python versions. + * FORWARDREF: return annotations as-is if possible, but replace any + undefined names with ForwardRef objects. The implementation proposed by + PEP 649 relies on language changes that cannot be backported; the + typing-extensions implementation simply returns the same result as VALUE. + * STRING: return annotations as strings, in a format close to the original + source. Again, this behavior cannot be replicated directly in a backport. + As an approximation, typing-extensions retrieves the annotations under + VALUE semantics and then stringifies them. 
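+
+        For example, under the STRING format an annotation of ``int`` is
+        returned as the string ``"int"`` (the stringification uses
+        ``typing._type_repr``).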
+ + The purpose of this backport is to allow users who would like to use + FORWARDREF or STRING semantics once PEP 649 is implemented, but who also + want to support earlier Python versions, to simply write: + + typing_extensions.get_annotations(obj, format=Format.FORWARDREF) + + """ + format = Format(format) + if format is Format.VALUE_WITH_FAKE_GLOBALS: + raise ValueError( + "The VALUE_WITH_FAKE_GLOBALS format is for internal use only" + ) + + if eval_str and format is not Format.VALUE: + raise ValueError("eval_str=True is only supported with format=Format.VALUE") + + if isinstance(obj, type): + # class + obj_dict = getattr(obj, '__dict__', None) + if obj_dict and hasattr(obj_dict, 'get'): + ann = obj_dict.get('__annotations__', None) + if isinstance(ann, _types.GetSetDescriptorType): + ann = None + else: + ann = None + + obj_globals = None + module_name = getattr(obj, '__module__', None) + if module_name: + module = sys.modules.get(module_name, None) + if module: + obj_globals = getattr(module, '__dict__', None) + obj_locals = dict(vars(obj)) + unwrap = obj + elif isinstance(obj, _types.ModuleType): + # module + ann = getattr(obj, '__annotations__', None) + obj_globals = obj.__dict__ + obj_locals = None + unwrap = None + elif callable(obj): + # this includes types.Function, types.BuiltinFunctionType, + # types.BuiltinMethodType, functools.partial, functools.singledispatch, + # "class funclike" from Lib/test/test_inspect... on and on it goes. + ann = getattr(obj, '__annotations__', None) + obj_globals = getattr(obj, '__globals__', None) + obj_locals = None + unwrap = obj + elif hasattr(obj, '__annotations__'): + ann = obj.__annotations__ + obj_globals = obj_locals = unwrap = None + else: + raise TypeError(f"{obj!r} is not a module, class, or callable.") + + if ann is None: + return {} + + if not isinstance(ann, dict): + raise ValueError(f"{obj!r}.__annotations__ is neither a dict nor None") + + if not ann: + return {} + + if not eval_str: + if format is Format.STRING: + return { + key: value if isinstance(value, str) else typing._type_repr(value) + for key, value in ann.items() + } + return dict(ann) + + if unwrap is not None: + while True: + if hasattr(unwrap, '__wrapped__'): + unwrap = unwrap.__wrapped__ + continue + if isinstance(unwrap, functools.partial): + unwrap = unwrap.func + continue + break + if hasattr(unwrap, "__globals__"): + obj_globals = unwrap.__globals__ + + if globals is None: + globals = obj_globals + if locals is None: + locals = obj_locals or {} + + # "Inject" type parameters into the local namespace + # (unless they are shadowed by assignments *in* the local namespace), + # as a way of emulating annotation scopes when calling `eval()` + if type_params := getattr(obj, "__type_params__", ()): + locals = {param.__name__: param for param in type_params} | locals + + return_value = {key: + value if not isinstance(value, str) else eval(value, globals, locals) + for key, value in ann.items() } + return return_value + + +if hasattr(typing, "evaluate_forward_ref"): + evaluate_forward_ref = typing.evaluate_forward_ref +else: + # Implements annotationlib.ForwardRef.evaluate + def _eval_with_owner( + forward_ref, *, owner=None, globals=None, locals=None, type_params=None + ): + if forward_ref.__forward_evaluated__: + return forward_ref.__forward_value__ + if getattr(forward_ref, "__cell__", None) is not None: + try: + value = forward_ref.__cell__.cell_contents + except ValueError: + pass + else: + forward_ref.__forward_evaluated__ = True + forward_ref.__forward_value__ = value 
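+                # The value is cached on the ForwardRef itself, so subsequent
+                # calls return it via the fast path at the top of this helper.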
+ return value + if owner is None: + owner = getattr(forward_ref, "__owner__", None) + + if ( + globals is None + and getattr(forward_ref, "__forward_module__", None) is not None + ): + globals = getattr( + sys.modules.get(forward_ref.__forward_module__, None), "__dict__", None + ) + if globals is None: + globals = getattr(forward_ref, "__globals__", None) + if globals is None: + if isinstance(owner, type): + module_name = getattr(owner, "__module__", None) + if module_name: + module = sys.modules.get(module_name, None) + if module: + globals = getattr(module, "__dict__", None) + elif isinstance(owner, _types.ModuleType): + globals = getattr(owner, "__dict__", None) + elif callable(owner): + globals = getattr(owner, "__globals__", None) + + # If we pass None to eval() below, the globals of this module are used. + if globals is None: + globals = {} + + if locals is None: + locals = {} + if isinstance(owner, type): + locals.update(vars(owner)) + + if type_params is None and owner is not None: + # "Inject" type parameters into the local namespace + # (unless they are shadowed by assignments *in* the local namespace), + # as a way of emulating annotation scopes when calling `eval()` + type_params = getattr(owner, "__type_params__", None) + + # Type parameters exist in their own scope, which is logically + # between the locals and the globals. We simulate this by adding + # them to the globals. + if type_params is not None: + globals = dict(globals) + for param in type_params: + globals[param.__name__] = param + + arg = forward_ref.__forward_arg__ + if arg.isidentifier() and not keyword.iskeyword(arg): + if arg in locals: + value = locals[arg] + elif arg in globals: + value = globals[arg] + elif hasattr(builtins, arg): + return getattr(builtins, arg) + else: + raise NameError(arg) + else: + code = forward_ref.__forward_code__ + value = eval(code, globals, locals) + forward_ref.__forward_evaluated__ = True + forward_ref.__forward_value__ = value + return value + + def evaluate_forward_ref( + forward_ref, + *, + owner=None, + globals=None, + locals=None, + type_params=None, + format=None, + _recursive_guard=frozenset(), + ): + """Evaluate a forward reference as a type hint. + + This is similar to calling the ForwardRef.evaluate() method, + but unlike that method, evaluate_forward_ref() also: + + * Recursively evaluates forward references nested within the type hint. + * Rejects certain objects that are not valid type hints. + * Replaces type hints that evaluate to None with types.NoneType. + * Supports the *FORWARDREF* and *STRING* formats. + + *forward_ref* must be an instance of ForwardRef. *owner*, if given, + should be the object that holds the annotations that the forward reference + derived from, such as a module, class object, or function. It is used to + infer the namespaces to use for looking up names. *globals* and *locals* + can also be explicitly given to provide the global and local namespaces. + *type_params* is a tuple of type parameters that are in scope when + evaluating the forward reference. This parameter must be provided (though + it may be an empty tuple) if *owner* is not given and the forward reference + does not already have an owner set. *format* specifies the format of the + annotation and is a member of the annotationlib.Format enum. 
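A minimal usage sketch of the backported get_annotations() described above, assuming a typing_extensions release that ships both get_annotations and Format; the names restock and Item are hypothetical, not part of this diff:

from typing_extensions import Format, get_annotations

def restock(item: "Item", count: int) -> None: ...

# Format.FORWARDREF never raises NameError for unresolvable names; on
# pre-3.14 interpreters this backport returns the raw annotations dict,
# so string annotations stay strings.
print(get_annotations(restock, format=Format.FORWARDREF))
# {'item': 'Item', 'count': <class 'int'>, 'return': None}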
+ + """ + if format == Format.STRING: + return forward_ref.__forward_arg__ + if forward_ref.__forward_arg__ in _recursive_guard: + return forward_ref + + # Evaluate the forward reference + try: + value = _eval_with_owner( + forward_ref, + owner=owner, + globals=globals, + locals=locals, + type_params=type_params, + ) + except NameError: + if format == Format.FORWARDREF: + return forward_ref + else: + raise + + if isinstance(value, str): + value = ForwardRef(value) + + # Recursively evaluate the type + if isinstance(value, ForwardRef): + if getattr(value, "__forward_module__", True) is not None: + globals = None + return evaluate_forward_ref( + value, + globals=globals, + locals=locals, + type_params=type_params, owner=owner, + _recursive_guard=_recursive_guard, format=format + ) + if sys.version_info < (3, 12, 5) and type_params: + # Make use of type_params + locals = dict(locals) if locals else {} + for tvar in type_params: + if tvar.__name__ not in locals: # lets not overwrite something present + locals[tvar.__name__] = tvar + if sys.version_info < (3, 12, 5): + return typing._eval_type( + value, + globals, + locals, + recursive_guard=_recursive_guard | {forward_ref.__forward_arg__}, + ) + else: + return typing._eval_type( + value, + globals, + locals, + type_params, + recursive_guard=_recursive_guard | {forward_ref.__forward_arg__}, + ) + + +class Sentinel: + """Create a unique sentinel object. + + *name* should be the name of the variable to which the return value shall be assigned. + + *repr*, if supplied, will be used for the repr of the sentinel object. + If not provided, "" will be used. + """ + + def __init__( + self, + name: str, + repr: typing.Optional[str] = None, + ): + self._name = name + self._repr = repr if repr is not None else f'<{name}>' + + def __repr__(self): + return self._repr + + if sys.version_info < (3, 11): + # The presence of this method convinces typing._type_check + # that Sentinels are types. + def __call__(self, *args, **kwargs): + raise TypeError(f"{type(self).__name__!r} object is not callable") + + # Breakpoint: https://github.com/python/cpython/pull/21515 + if sys.version_info >= (3, 10): + def __or__(self, other): + return typing.Union[self, other] + + def __ror__(self, other): + return typing.Union[other, self] + + def __getstate__(self): + raise TypeError(f"Cannot pickle {type(self).__name__!r} object") + + +if sys.version_info >= (3, 14, 0, "beta"): + type_repr = annotationlib.type_repr +else: + def type_repr(value): + """Convert a Python value to a format suitable for use with the STRING format. + + This is intended as a helper for tools that support the STRING format but do + not have access to the code that originally produced the annotations. It uses + repr() for most objects. + + """ + if isinstance(value, (type, _types.FunctionType, _types.BuiltinFunctionType)): + if value.__module__ == "builtins": + return value.__qualname__ + return f"{value.__module__}.{value.__qualname__}" + if value is ...: + return "..." + return repr(value) + + +# Aliases for items that are in typing in all supported versions. +# We use hasattr() checks so this library will continue to import on +# future versions of Python that may remove these names. 
+_typing_names = [ + "AbstractSet", + "AnyStr", + "BinaryIO", + "Callable", + "Collection", + "Container", + "Dict", + "FrozenSet", + "Hashable", + "IO", + "ItemsView", + "Iterable", + "Iterator", + "KeysView", + "List", + "Mapping", + "MappingView", + "Match", + "MutableMapping", + "MutableSequence", + "MutableSet", + "Optional", + "Pattern", + "Reversible", + "Sequence", + "Set", + "Sized", + "TextIO", + "Tuple", + "Union", + "ValuesView", + "cast", + "no_type_check", + "no_type_check_decorator", + # This is private, but it was defined by typing_extensions for a long time + # and some users rely on it. + "_AnnotatedAlias", +] +globals().update( + {name: getattr(typing, name) for name in _typing_names if hasattr(typing, name)} +) +# These are defined unconditionally because they are used in +# typing-extensions itself. +Generic = typing.Generic +ForwardRef = typing.ForwardRef +Annotated = typing.Annotated diff --git a/venv/lib/python3.12/site-packages/werkzeug-3.1.4.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/werkzeug-3.1.4.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/werkzeug-3.1.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/werkzeug-3.1.3.dist-info/METADATA b/venv/lib/python3.12/site-packages/werkzeug-3.1.4.dist-info/METADATA similarity index 83% rename from venv/lib/python3.12/site-packages/werkzeug-3.1.3.dist-info/METADATA rename to venv/lib/python3.12/site-packages/werkzeug-3.1.4.dist-info/METADATA index e495b3d2..7ceaed1e 100644 --- a/venv/lib/python3.12/site-packages/werkzeug-3.1.3.dist-info/METADATA +++ b/venv/lib/python3.12/site-packages/werkzeug-3.1.4.dist-info/METADATA @@ -1,14 +1,14 @@ -Metadata-Version: 2.3 +Metadata-Version: 2.4 Name: Werkzeug -Version: 3.1.3 +Version: 3.1.4 Summary: The comprehensive WSGI web application library. Maintainer-email: Pallets Requires-Python: >=3.9 Description-Content-Type: text/markdown +License-Expression: BSD-3-Clause Classifier: Development Status :: 5 - Production/Stable Classifier: Environment :: Web Environment Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content @@ -17,16 +17,18 @@ Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware Classifier: Topic :: Software Development :: Libraries :: Application Frameworks Classifier: Typing :: Typed -Requires-Dist: MarkupSafe>=2.1.1 +License-File: LICENSE.txt +Requires-Dist: markupsafe>=2.1.1 Requires-Dist: watchdog>=2.3 ; extra == "watchdog" -Project-URL: Changes, https://werkzeug.palletsprojects.com/changes/ +Project-URL: Changes, https://werkzeug.palletsprojects.com/page/changes/ Project-URL: Chat, https://discord.gg/pallets Project-URL: Documentation, https://werkzeug.palletsprojects.com/ Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Issue Tracker, https://github.com/pallets/werkzeug/issues/ -Project-URL: Source Code, https://github.com/pallets/werkzeug/ +Project-URL: Source, https://github.com/pallets/werkzeug/ Provides-Extra: watchdog +
+ # Werkzeug *werkzeug* German noun: "tool". Etymology: *werk* ("work"), *zeug* ("stuff") @@ -97,3 +99,11 @@ users, and allow the maintainers to devote more time to the projects, [please donate today]: https://palletsprojects.com/donate +## Contributing + +See our [detailed contributing documentation][contrib] for many ways to +contribute, including reporting issues, requesting features, asking or answering +questions, and making PRs. + +[contrib]: https://palletsprojects.com/contributing/ + diff --git a/venv/lib/python3.12/site-packages/werkzeug-3.1.3.dist-info/RECORD b/venv/lib/python3.12/site-packages/werkzeug-3.1.4.dist-info/RECORD similarity index 69% rename from venv/lib/python3.12/site-packages/werkzeug-3.1.3.dist-info/RECORD rename to venv/lib/python3.12/site-packages/werkzeug-3.1.4.dist-info/RECORD index c807f264..40eafaf6 100644 --- a/venv/lib/python3.12/site-packages/werkzeug-3.1.3.dist-info/RECORD +++ b/venv/lib/python3.12/site-packages/werkzeug-3.1.4.dist-info/RECORD @@ -1,8 +1,9 @@ -werkzeug-3.1.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -werkzeug-3.1.3.dist-info/LICENSE.txt,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 -werkzeug-3.1.3.dist-info/METADATA,sha256=9d8uO6J-ck8mHyFjxFUs5nCVy5iN3XdTYcJfz2QBr0I,3682 -werkzeug-3.1.3.dist-info/RECORD,, -werkzeug-3.1.3.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82 +werkzeug-3.1.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +werkzeug-3.1.4.dist-info/METADATA,sha256=USa527kXfziccFVdgdO4LyqeDOHHnIxwJV87VyHJHjA,4029 +werkzeug-3.1.4.dist-info/RECORD,, +werkzeug-3.1.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +werkzeug-3.1.4.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82 +werkzeug-3.1.4.dist-info/licenses/LICENSE.txt,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 werkzeug/__init__.py,sha256=CejNWfCZKDaJ_FmshZFw8dFgjNAlu6q15ec7DwWJlM8,165 werkzeug/__pycache__/__init__.cpython-312.pyc,, werkzeug/__pycache__/_internal.cpython-312.pyc,, @@ -19,8 +20,8 @@ werkzeug/__pycache__/urls.cpython-312.pyc,, werkzeug/__pycache__/user_agent.cpython-312.pyc,, werkzeug/__pycache__/utils.cpython-312.pyc,, werkzeug/__pycache__/wsgi.cpython-312.pyc,, -werkzeug/_internal.py,sha256=su1olkbHMkzt0VKcEkPLCha8sdVzXNBuqW6YVpp8GHg,5545 -werkzeug/_reloader.py,sha256=QuMO-UwuD-cHsUpwSEUr7iYNbv_ziHFhgNDAiv25J80,15429 +werkzeug/_internal.py,sha256=QRynfUEOngEAmm4qzhlufbhk1K-n901Q1KQWKznsaNU,5547 +werkzeug/_reloader.py,sha256=IppO3kX_GxQwaV6h8SPWe0lSTSw6kd_HJXqLiUhR7AU,15100 werkzeug/datastructures/__init__.py,sha256=0Vt8Lt8KUYSoAe8ADt1jIE-w3Ib9tkrk0HEhBOktqUw,2615 werkzeug/datastructures/__pycache__/__init__.cpython-312.pyc,, werkzeug/datastructures/__pycache__/accept.cpython-312.pyc,, @@ -35,15 +36,15 @@ werkzeug/datastructures/__pycache__/range.cpython-312.pyc,, werkzeug/datastructures/__pycache__/structures.cpython-312.pyc,, werkzeug/datastructures/accept.py,sha256=xoDD4qwZeTjew_1Ze0gyyKWkbhtowwR5tHTWnobPeGI,12177 werkzeug/datastructures/auth.py,sha256=Zcqdr8-uUpq6_czg1672S2XthG0UakhUHOuummzqz64,10055 -werkzeug/datastructures/cache_control.py,sha256=hh-0nlj8CWDTrWcDvprMkmHZ8HW_Afo4853vl0LuFU4,9697 +werkzeug/datastructures/cache_control.py,sha256=omtSmmAh3KgQRJYzaW05LtEt5HoJlLkJTxjil-Q3cuM,9711 werkzeug/datastructures/csp.py,sha256=2UDhFNqBzjhpA45m11HDg6LnhBSSWWdkfFzPq7ikfho,3816 werkzeug/datastructures/etag.py,sha256=N2dFqhFsLR0k5hgjol3QTGpcSltJhpgLWeyz6Va80G8,3278 
-werkzeug/datastructures/file_storage.py,sha256=2P-4PEkR_kY37g3hc1S30YH_15VrwRnQ13sBFreZyc4,6701 -werkzeug/datastructures/headers.py,sha256=_VjcQCrLxjNKxPL8xSoFnEJnZ5tRc1zdL9spQf4c758,21554 -werkzeug/datastructures/mixins.py,sha256=sYJeA8kZuAOvzlfrRWRN2dyuNrBev4yFxWH8coZyrec,9027 -werkzeug/datastructures/range.py,sha256=5c1Mo5jBkkVhQyLoD9lPdjETKo0iw9g-lFYOqF1ZQvY,7020 -werkzeug/datastructures/structures.py,sha256=IG5WpAj5RZSUfyxYXXHvIgtlOx5PQeZ9YmQO87FJr08,41359 -werkzeug/debug/__init__.py,sha256=80vaavVZYwV21ItrsOLQudEUi2uKDlxcjm5cCmeU1u4,20016 +werkzeug/datastructures/file_storage.py,sha256=J57paAQGxBnq2yjY0yfsEuN1wPRkDXLkWH3wk_pQvTs,6715 +werkzeug/datastructures/headers.py,sha256=J3WQKnQGmt9hUDn-d_YI62PtrjW7U7sJTDNTFtjWgRA,21540 +werkzeug/datastructures/mixins.py,sha256=h8K_A0Sw47s2cO2ZNFf60ENbmVDsi3rgHr_f77VFwJE,9039 +werkzeug/datastructures/range.py,sha256=KAWLRDyL8bzSBDm_OirxwA17xkFM_vdedZINmyDfNUI,7034 +werkzeug/datastructures/structures.py,sha256=0x0jd54T4cLOIRM6sRYeXt_tjmaftItNfjKFhLiKLyQ,41356 +werkzeug/debug/__init__.py,sha256=raK5zQjmkrjDvS5mDUIZvZoL8YRMEne5eBG3C2vUCgc,20229 werkzeug/debug/__pycache__/__init__.cpython-312.pyc,, werkzeug/debug/__pycache__/console.cpython-312.pyc,, werkzeug/debug/__pycache__/repr.cpython-312.pyc,, @@ -56,11 +57,11 @@ werkzeug/debug/shared/debugger.js,sha256=SRL9YZ9FTVngaYD-INQNincEVdZ-kBHa_-VJx0U werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191 werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200 werkzeug/debug/shared/style.css,sha256=-xSxzUEZGw_IqlDR5iZxitNl8LQUjBM-_Y4UAvXVH8g,6078 -werkzeug/debug/tbtools.py,sha256=4JDhZT_2gm9sMgfiMieEyxRsV196i3kp_0b3MOjy1KY,13560 -werkzeug/exceptions.py,sha256=cpwKKTfe1_yFrBpRHjp2bqwDhtUdHnGoDKcWOXR_SIE,26455 +werkzeug/debug/tbtools.py,sha256=L_CD1ha5WmyWrKVZ3zxovJ4gI1CATsc4Yc1Zms41EvE,13645 +werkzeug/exceptions.py,sha256=7XNgddMV3UQR6UPr7lWlzSFxO8Ur8bSJgLdvJuauEX0,26888 werkzeug/formparser.py,sha256=v0-F9OcmNHuPLqXePW_b0asWzpH-Z_ZlGj8ao0CMmuo,15847 -werkzeug/http.py,sha256=PRpjIrbvIIiAMlmxwVIo9MMxaNX8vSOSn58rwAThEWM,44880 -werkzeug/local.py,sha256=KUFuAm8BAayQouzVg0MGqW_hiwY8Z_lY5l7d1Scvsx8,22492 +werkzeug/http.py,sha256=gOc_P6u9Am2Z8fQolD_JnBi25FAUUpJXtp0js22Fwtg,44908 +werkzeug/local.py,sha256=o4Zz1ZAuNRBn8r-laGIxCdKXjxx1i0uJq3eNyKPy4cg,22184 werkzeug/middleware/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 werkzeug/middleware/__pycache__/__init__.cpython-312.pyc,, werkzeug/middleware/__pycache__/dispatcher.cpython-312.pyc,, @@ -87,7 +88,7 @@ werkzeug/routing/converters.py,sha256=iqpee_mAjr1oGbq0etujYF9PiDv5U7DgNkARHXnMId werkzeug/routing/exceptions.py,sha256=wNBiUmUk4OtFOpbdDSr7KKKUjH7yn84JqwBicUup8p8,4846 werkzeug/routing/map.py,sha256=mEXlHOyinkg1Jtx5L0UDYsvoX4eVLiEuEVQzD5LVAz8,36515 werkzeug/routing/matcher.py,sha256=nfBbl37eGAkZ1dQlumshFcPuyfggmFjPuSSQOE6GuYs,7849 -werkzeug/routing/rules.py,sha256=eGi6PD-COG2As_HY0nAw-nxYxLTH0FsuqRaSy8d9FjQ,32510 +werkzeug/routing/rules.py,sha256=XADfK242jF_r4-q7nBo2Zz66RTzcCPOqG5WNfRFNpoo,32538 werkzeug/sansio/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 werkzeug/sansio/__pycache__/__init__.cpython-312.pyc,, werkzeug/sansio/__pycache__/http.cpython-312.pyc,, @@ -95,22 +96,22 @@ werkzeug/sansio/__pycache__/multipart.cpython-312.pyc,, werkzeug/sansio/__pycache__/request.cpython-312.pyc,, werkzeug/sansio/__pycache__/response.cpython-312.pyc,, werkzeug/sansio/__pycache__/utils.cpython-312.pyc,, 
-werkzeug/sansio/http.py,sha256=OYlTJn8fJhHA0CDOOmerekpsyGyIQVm9LvVRO3lH0TA,5337 -werkzeug/sansio/multipart.py,sha256=fMGgY9CvMDRgNgKozz2cBIp0iN2EME2IIOBh306_Ecs,11637 +werkzeug/sansio/http.py,sha256=l-N1ktc7WVgfDA9t1MhyeVFNJGvnRTC8mJViZDQYVGA,5351 +werkzeug/sansio/multipart.py,sha256=zrap1U40dh7kR_c5oenXKDbbiwTdCGuecSkc1pwxBb8,11963 werkzeug/sansio/request.py,sha256=ijwestWQS-dpb61uKl0NRntzNtH6C72OXCNomx-XKxc,19832 -werkzeug/sansio/response.py,sha256=8pE2fiHDroOOCtBWJWig4Lnv3VztD5szkSzeQELIlcQ,27965 +werkzeug/sansio/response.py,sha256=WMEdlLMZMGjy0EQKDF4Z1ZPpBbi3BzVw_qfoH5IuWkI,27933 werkzeug/sansio/utils.py,sha256=3-Gr0Qsa_Rx48mUm7zdFJZNNKMzHfPAknywujHnkhPs,5256 -werkzeug/security.py,sha256=WctngEBHBo8e-gXf6lMURhs1t0tmEjJjBYZi2MXoWss,5581 -werkzeug/serving.py,sha256=k30NzIKq6mkNONk7B5rTyEuLrLZo__Kz2_XGXLjVUic,39857 -werkzeug/test.py,sha256=2G08IExd_x3T1P69mz57LYYQrqk31nxmkgHY5M5HYZU,52795 +werkzeug/security.py,sha256=aMKVfh67gTOtmBM6tasnohIVMxGI9ooxLQ0gkktePtY,5960 +werkzeug/serving.py,sha256=jUSzy9o1iZ_LwdReXZusdoOJPjMK03xGCzhsZ_SUtpk,39826 +werkzeug/test.py,sha256=Xf8bCYkjKJLtTXWTvw4E5AlYrK7CYjnmIolxSHkkKsc,52779 werkzeug/testapp.py,sha256=5_IS5Dh_WfWfNcTLmbydj01lomgcKA_4l9PPCNZnmdI,6332 werkzeug/urls.py,sha256=XyNKwHvK5IC37-wuIDMYWkiCJ3yLTLGv7wn2GF3ndqI,6430 werkzeug/user_agent.py,sha256=lSlLYKCcbzCUSkbdAoO8zPk2UR-8Mdn6iu_iA2kYPBA,1416 -werkzeug/utils.py,sha256=1StUqI45bcxJx4sj81qVPQH5X8VuWgtICViJI_BKg-c,24725 +werkzeug/utils.py,sha256=QRcyUe4JURGaOAqZGnECcsbwKsEkL7gUDZiTNlLV0W4,24623 werkzeug/wrappers/__init__.py,sha256=b78jCM8x96kJUGLZ5FYFR3zlK-3pnFAmP9RJIGU0ses,138 werkzeug/wrappers/__pycache__/__init__.cpython-312.pyc,, werkzeug/wrappers/__pycache__/request.cpython-312.pyc,, werkzeug/wrappers/__pycache__/response.cpython-312.pyc,, -werkzeug/wrappers/request.py,sha256=2eJKd8yBU4zc0FIm2T6SROeuZERkP9m_Yx1Z7MeMs-Y,24737 -werkzeug/wrappers/response.py,sha256=u6zg7VpNYrCeEjpIgf8VqgfaSi9yR_9wi9ly2uudglg,32459 -werkzeug/wsgi.py,sha256=P7jB0VpG6X6miies4uk7Zgm7NVm4Yz8Ra6Inr5q_FMs,20894 +werkzeug/wrappers/request.py,sha256=f9Jpe9zqs_v0n-DTZj7FfMyU_y3LhBBBX4YCYsrqGUs,24730 +werkzeug/wrappers/response.py,sha256=VRcXJVRjaYYE0Q93PVu0Y5nhDf9TJkjhY8j_RtMalX0,32443 +werkzeug/wsgi.py,sha256=SAj53op0Hqa6Bd10FHglfR8wk7jLGw5_vn9naPSDlpM,20920 diff --git a/venv/lib/python3.12/site-packages/werkzeug-3.1.4.dist-info/REQUESTED b/venv/lib/python3.12/site-packages/werkzeug-3.1.4.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/venv/lib/python3.12/site-packages/werkzeug-3.1.4.dist-info/WHEEL b/venv/lib/python3.12/site-packages/werkzeug-3.1.4.dist-info/WHEEL new file mode 100644 index 00000000..d8b9936d --- /dev/null +++ b/venv/lib/python3.12/site-packages/werkzeug-3.1.4.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.12.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.12/site-packages/werkzeug-3.1.3.dist-info/LICENSE.txt b/venv/lib/python3.12/site-packages/werkzeug-3.1.4.dist-info/licenses/LICENSE.txt similarity index 100% rename from venv/lib/python3.12/site-packages/werkzeug-3.1.3.dist-info/LICENSE.txt rename to venv/lib/python3.12/site-packages/werkzeug-3.1.4.dist-info/licenses/LICENSE.txt diff --git a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/__init__.cpython-312.pyc index e01b95d3..53440cf1 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/__init__.cpython-312.pyc and 
b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/_internal.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/_internal.cpython-312.pyc index 4487e794..ab71791d 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/_internal.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/_internal.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/_reloader.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/_reloader.cpython-312.pyc index 7b2db9d6..be9f1ff3 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/_reloader.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/_reloader.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/exceptions.cpython-312.pyc index 3c4500c6..92d63942 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/exceptions.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/formparser.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/formparser.cpython-312.pyc index 1b080261..fe9d5ee2 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/formparser.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/formparser.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/http.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/http.cpython-312.pyc index 97e0bd67..dd176049 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/http.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/http.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/local.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/local.cpython-312.pyc index 7d4e32c1..e475aaca 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/local.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/local.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/security.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/security.cpython-312.pyc index b1115f63..8f1ef92b 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/security.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/security.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/serving.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/serving.cpython-312.pyc index e9a2b5a0..bb9f2f82 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/serving.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/serving.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/test.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/test.cpython-312.pyc index afe486b8..b66567c5 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/test.cpython-312.pyc and 
b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/test.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/testapp.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/testapp.cpython-312.pyc index ae6339c5..8d97af96 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/testapp.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/testapp.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/urls.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/urls.cpython-312.pyc index 94156462..866d2238 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/urls.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/urls.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/user_agent.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/user_agent.cpython-312.pyc index a1e597ac..bb0beef4 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/user_agent.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/user_agent.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/utils.cpython-312.pyc index 22421f85..23c08932 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/utils.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/wsgi.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/wsgi.cpython-312.pyc index 88f27049..10865492 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/__pycache__/wsgi.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/__pycache__/wsgi.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/_internal.py b/venv/lib/python3.12/site-packages/werkzeug/_internal.py index 7dd2fbcc..75e43358 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/_internal.py +++ b/venv/lib/python3.12/site-packages/werkzeug/_internal.py @@ -36,9 +36,9 @@ def _wsgi_encoding_dance(s: str) -> str: def _get_environ(obj: WSGIEnvironment | Request) -> WSGIEnvironment: env = getattr(obj, "environ", obj) - assert isinstance( - env, dict - ), f"{type(obj).__name__!r} is not a WSGI environment (has to be a dict)" + assert isinstance(env, dict), ( + f"{type(obj).__name__!r} is not a WSGI environment (has to be a dict)" + ) return env diff --git a/venv/lib/python3.12/site-packages/werkzeug/_reloader.py b/venv/lib/python3.12/site-packages/werkzeug/_reloader.py index 8fd50b96..5eb64bc7 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/_reloader.py +++ b/venv/lib/python3.12/site-packages/werkzeug/_reloader.py @@ -26,6 +26,9 @@ if hasattr(sys, "real_prefix"): _stat_ignore_scan = tuple(prefix) del prefix +# Ignore __pycache__ since a change there will always have a change to +# the source file (or initial pyc file) as well. Ignore common version control +# internals. Ignore common tool caches. 
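The reloader's ignore logic patched here is driven by public options on the development server. A hedged sketch, with a placeholder WSGI app and illustrative paths:

from werkzeug.serving import run_simple

def app(environ, start_response):
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"hello"]

if __name__ == "__main__":
    run_simple(
        "127.0.0.1", 8000, app,
        use_reloader=True,
        extra_files=["instance/config.toml"],   # watched in addition to .py files
        exclude_patterns=["*/node_modules/*"],  # skipped, like __pycache__ above
    )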
_ignore_common_dirs = { "__pycache__", ".git", @@ -86,9 +89,6 @@ def _find_stat_paths( parent_has_py = {os.path.dirname(path): True} for root, dirs, files in os.walk(path): - # Optimizations: ignore system prefixes, __pycache__ will - # have a py or pyc module at the import path, ignore some - # common known dirs such as version control and tool caches. if ( root.startswith(_stat_ignore_scan) or os.path.basename(root) in _ignore_common_dirs @@ -325,7 +325,7 @@ class WatchdogReloaderLoop(ReloaderLoop): trigger_reload = self.trigger_reload class EventHandler(PatternMatchingEventHandler): - def on_any_event(self, event: FileModifiedEvent): # type: ignore + def on_any_event(self, event: FileModifiedEvent) -> None: # type: ignore[override] if event.event_type not in { EVENT_TYPE_CLOSED, EVENT_TYPE_CREATED, @@ -345,12 +345,7 @@ class WatchdogReloaderLoop(ReloaderLoop): self.name = f"watchdog ({reloader_name})" self.observer = Observer() - # Extra patterns can be non-Python files, match them in addition - # to all Python files in default and extra directories. Ignore - # __pycache__ since a change there will always have a change to - # the source file (or initial pyc file) as well. Ignore Git and - # Mercurial internal changes. - extra_patterns = [p for p in self.extra_files if not os.path.isdir(p)] + extra_patterns = (p for p in self.extra_files if not os.path.isdir(p)) self.event_handler = EventHandler( patterns=["*.py", "*.pyc", "*.zip", *extra_patterns], ignore_patterns=[ @@ -358,13 +353,13 @@ class WatchdogReloaderLoop(ReloaderLoop): *self.exclude_patterns, ], ) - self.should_reload = False + self.should_reload = threading.Event() def trigger_reload(self, filename: str | bytes) -> None: # This is called inside an event handler, which means throwing # SystemExit has no effect. # https://github.com/gorakhargosh/watchdog/issues/294 - self.should_reload = True + self.should_reload.set() self.log_reload(filename) def __enter__(self) -> ReloaderLoop: @@ -377,9 +372,8 @@ class WatchdogReloaderLoop(ReloaderLoop): self.observer.join() def run(self) -> None: - while not self.should_reload: + while not self.should_reload.wait(timeout=self.interval): self.run_step() - time.sleep(self.interval) sys.exit(3) @@ -393,7 +387,7 @@ class WatchdogReloaderLoop(ReloaderLoop): self.event_handler, path, recursive=True ) except OSError: - # Clear this path from list of watches We don't want + # Clear this path from list of watches. We don't want # the same error message showing again in the next # iteration. 
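The hunk above swaps a bool flag polled with time.sleep() for a threading.Event waited on with a timeout, so the run loop wakes as soon as another thread requests a reload instead of sleeping out the full interval. A self-contained sketch of that pattern, with illustrative names:

import threading

should_reload = threading.Event()

def trigger_reload() -> None:
    # In werkzeug this is called from a watchdog handler thread.
    should_reload.set()

def run(interval: float = 1.0) -> None:
    # wait() returns False on timeout and True once the event is set.
    while not should_reload.wait(timeout=interval):
        pass  # periodic work between checks (run_step() in the reloader)
    print("exiting to reload")

threading.Timer(0.1, trigger_reload).start()
run()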
self.watches[path] = None diff --git a/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/__init__.cpython-312.pyc index 96f016af..da84c84c 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/__init__.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/accept.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/accept.cpython-312.pyc index 526c9d1d..3a6ce787 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/accept.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/accept.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/auth.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/auth.cpython-312.pyc index 8b583058..54522b0c 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/auth.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/auth.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/cache_control.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/cache_control.cpython-312.pyc index d3736f0c..e3e3e956 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/cache_control.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/cache_control.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/csp.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/csp.cpython-312.pyc index 4f34f8a9..098f58e6 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/csp.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/csp.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/etag.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/etag.cpython-312.pyc index 6e0e7cd6..daecc4d1 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/etag.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/etag.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/file_storage.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/file_storage.cpython-312.pyc index 1e2ae967..e7e97711 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/file_storage.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/file_storage.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/headers.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/headers.cpython-312.pyc index 97a72038..72dc4e26 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/headers.cpython-312.pyc and 
b/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/headers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/mixins.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/mixins.cpython-312.pyc index 14272a3f..97419787 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/mixins.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/mixins.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/range.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/range.cpython-312.pyc index f436e028..8c0e32a2 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/range.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/range.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/structures.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/structures.cpython-312.pyc index 489ecbc5..a6a0d2ea 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/structures.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/datastructures/__pycache__/structures.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/datastructures/cache_control.py b/venv/lib/python3.12/site-packages/werkzeug/datastructures/cache_control.py index 8d700ab6..3d963102 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/datastructures/cache_control.py +++ b/venv/lib/python3.12/site-packages/werkzeug/datastructures/cache_control.py @@ -270,4 +270,4 @@ class ResponseCacheControl(_CacheControl): # circular dependencies -from .. import http +from .. import http # noqa: E402 diff --git a/venv/lib/python3.12/site-packages/werkzeug/datastructures/file_storage.py b/venv/lib/python3.12/site-packages/werkzeug/datastructures/file_storage.py index 12342447..3a17f512 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/datastructures/file_storage.py +++ b/venv/lib/python3.12/site-packages/werkzeug/datastructures/file_storage.py @@ -206,4 +206,4 @@ class FileMultiDict(MultiDict[str, FileStorage]): # circular dependencies -from .. import http +from .. import http # noqa: E402 diff --git a/venv/lib/python3.12/site-packages/werkzeug/datastructures/headers.py b/venv/lib/python3.12/site-packages/werkzeug/datastructures/headers.py index 1088e3bc..b8983622 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/datastructures/headers.py +++ b/venv/lib/python3.12/site-packages/werkzeug/datastructures/headers.py @@ -622,7 +622,7 @@ class EnvironHeaders(ImmutableHeadersMixin, Headers): # type: ignore[misc] return self.environ is other.environ - __hash__ = None # type: ignore[assignment] + __hash__ = None def __getitem__(self, key: str) -> str: # type: ignore[override] return self._get_key(key) @@ -659,4 +659,4 @@ class EnvironHeaders(ImmutableHeadersMixin, Headers): # type: ignore[misc] # circular dependencies -from .. import http +from .. 
import http # noqa: E402 diff --git a/venv/lib/python3.12/site-packages/werkzeug/datastructures/mixins.py b/venv/lib/python3.12/site-packages/werkzeug/datastructures/mixins.py index 03d461ad..8e00bdad 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/datastructures/mixins.py +++ b/venv/lib/python3.12/site-packages/werkzeug/datastructures/mixins.py @@ -275,9 +275,9 @@ class UpdateDictMixin(dict[K, V]): ) -> V | T: modified = key in self if default is _missing: - rv = super().pop(key) + rv: V | T = super().pop(key) else: - rv = super().pop(key, default) # type: ignore[arg-type] + rv = super().pop(key, default) if modified and self.on_update is not None: self.on_update(self) return rv @@ -306,7 +306,7 @@ class UpdateDictMixin(dict[K, V]): **kwargs: V, ) -> None: if arg is None: - super().update(**kwargs) + super().update(**kwargs) # type: ignore[call-overload] else: super().update(arg, **kwargs) diff --git a/venv/lib/python3.12/site-packages/werkzeug/datastructures/range.py b/venv/lib/python3.12/site-packages/werkzeug/datastructures/range.py index 4c9f67d4..72f863d2 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/datastructures/range.py +++ b/venv/lib/python3.12/site-packages/werkzeug/datastructures/range.py @@ -211,4 +211,4 @@ class ContentRange: # circular dependencies -from .. import http +from .. import http # noqa: E402 diff --git a/venv/lib/python3.12/site-packages/werkzeug/datastructures/structures.py b/venv/lib/python3.12/site-packages/werkzeug/datastructures/structures.py index dbb7e804..4efec1f5 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/datastructures/structures.py +++ b/venv/lib/python3.12/site-packages/werkzeug/datastructures/structures.py @@ -190,7 +190,7 @@ class MultiDict(TypeConversionDict[K, V]): if mapping is None: super().__init__() elif isinstance(mapping, MultiDict): - super().__init__((k, vs[:]) for k, vs in mapping.lists()) + super().__init__((k, vs[:]) for k, vs in mapping.lists()) # type: ignore[misc] elif isinstance(mapping, cabc.Mapping): tmp = {} for key, value in mapping.items(): @@ -708,7 +708,7 @@ class _OrderedMultiDict(MultiDict[K, V]): yield values def add(self, key: K, value: V) -> None: - dict.setdefault(self, key, []).append(_omd_bucket(self, key, value)) # type: ignore[arg-type,attr-defined] + dict.setdefault(self, key, []).append(_omd_bucket(self, key, value)) # type: ignore[misc] @t.overload def getlist(self, key: K) -> list[V]: ... @@ -789,7 +789,7 @@ class _OrderedMultiDict(MultiDict[K, V]): buckets: list[_omd_bucket[K, V]] try: - key, buckets = dict.popitem(self) # type: ignore[arg-type,assignment] + key, buckets = dict.popitem(self) # type: ignore[arg-type] except KeyError as e: raise exceptions.BadRequestKeyError(e.args[0]) from None @@ -803,7 +803,7 @@ class _OrderedMultiDict(MultiDict[K, V]): buckets: list[_omd_bucket[K, V]] try: - key, buckets = dict.popitem(self) # type: ignore[arg-type,assignment] + key, buckets = dict.popitem(self) # type: ignore[arg-type] except KeyError as e: raise exceptions.BadRequestKeyError(e.args[0]) from None @@ -1212,7 +1212,7 @@ class HeaderSet(cabc.MutableSet[str]): # circular dependencies -from .. import http +from .. 
import http # noqa: E402 def __getattr__(name: str) -> t.Any: diff --git a/venv/lib/python3.12/site-packages/werkzeug/debug/__init__.py b/venv/lib/python3.12/site-packages/werkzeug/debug/__init__.py index 0c4cabd8..afc6dbbc 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/debug/__init__.py +++ b/venv/lib/python3.12/site-packages/werkzeug/debug/__init__.py @@ -318,8 +318,11 @@ class DebuggedApplication: return self._pin @pin.setter - def pin(self, value: str) -> None: - self._pin = value + def pin(self, value: str | None) -> None: + if value is None: + del self._pin + else: + self._pin = value @property def pin_cookie_name(self) -> str: @@ -438,6 +441,11 @@ class DebuggedApplication: """ if self.pin is None: return True + + # If we failed too many times, then we're locked out. + if self._failed_pin_auth.value >= 10: + return False + val = parse_cookie(environ).get(self.pin_cookie_name) if not val or "|" not in val: return False @@ -487,7 +495,7 @@ class DebuggedApplication: auth = True # If we failed too many times, then we're locked out. - elif self._failed_pin_auth.value > 10: + elif self._failed_pin_auth.value >= 10: exhausted = True # Otherwise go through pin based authentication diff --git a/venv/lib/python3.12/site-packages/werkzeug/debug/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/debug/__pycache__/__init__.cpython-312.pyc index 0cddfef0..05a492fb 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/debug/__pycache__/__init__.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/debug/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/debug/__pycache__/console.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/debug/__pycache__/console.cpython-312.pyc index 82f2ade7..15f4a4c0 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/debug/__pycache__/console.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/debug/__pycache__/console.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/debug/__pycache__/repr.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/debug/__pycache__/repr.cpython-312.pyc index 3f6a0647..66f84253 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/debug/__pycache__/repr.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/debug/__pycache__/repr.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/debug/__pycache__/tbtools.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/debug/__pycache__/tbtools.cpython-312.pyc index 7d061aea..f357afbd 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/debug/__pycache__/tbtools.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/debug/__pycache__/tbtools.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/debug/tbtools.py b/venv/lib/python3.12/site-packages/werkzeug/debug/tbtools.py index d922893e..ca4acf60 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/debug/tbtools.py +++ b/venv/lib/python3.12/site-packages/werkzeug/debug/tbtools.py @@ -237,8 +237,8 @@ class DebugTraceback: def all_tracebacks( self, ) -> list[tuple[str | None, traceback.TracebackException]]: - out = [] - current = self._te + out: list[tuple[str | None, traceback.TracebackException]] = [] + current: traceback.TracebackException | None = self._te while current is not None: if current.__cause__ is not None: @@ -249,8 +249,7 @@ class DebugTraceback: 
chained_exc = current.__cause__ elif current.__context__ is not None and not current.__suppress_context__: chained_msg = ( - "During handling of the above exception, another" - " exception occurred" + "During handling of the above exception, another exception occurred" ) chained_exc = current.__context__ else: @@ -410,7 +409,7 @@ class DebugFrameSummary(traceback.FrameSummary): if cls == "current" and colno and end_colno: arrow = ( f'\n{" " * prefix}' - f'{" " * (colno - prefix)}{"^" * (end_colno - colno)}' + f"{' ' * (colno - prefix)}{'^' * (end_colno - colno)}" ) else: arrow = "" @@ -421,7 +420,7 @@ class DebugFrameSummary(traceback.FrameSummary): f"{arrow if arrow else ''}" ) - if lines: + if line_idx < len(lines): for line in lines[start_idx:line_idx]: render_line(line, "before") diff --git a/venv/lib/python3.12/site-packages/werkzeug/exceptions.py b/venv/lib/python3.12/site-packages/werkzeug/exceptions.py index 1cd99977..15f534e9 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/exceptions.py +++ b/venv/lib/python3.12/site-packages/werkzeug/exceptions.py @@ -59,7 +59,7 @@ if t.TYPE_CHECKING: from _typeshed.wsgi import WSGIEnvironment from .datastructures import WWWAuthenticate - from .sansio.response import Response + from .sansio.response import Response as SansIOResponse from .wrappers.request import Request as WSGIRequest from .wrappers.response import Response as WSGIResponse @@ -79,7 +79,7 @@ class HTTPException(Exception): def __init__( self, description: str | None = None, - response: Response | None = None, + response: SansIOResponse | None = None, ) -> None: super().__init__() if description is not None: @@ -129,27 +129,42 @@ class HTTPException(Exception): """Get a list of headers.""" return [("Content-Type", "text/html; charset=utf-8")] + @t.overload + def get_response( + self, + environ: WSGIEnvironment | WSGIRequest | None = ..., + scope: None = None, + ) -> WSGIResponse: ... + @t.overload + def get_response( + self, + environ: None = None, + scope: dict[str, t.Any] = ..., + ) -> SansIOResponse: ... def get_response( self, environ: WSGIEnvironment | WSGIRequest | None = None, scope: dict[str, t.Any] | None = None, - ) -> Response: - """Get a response object. If one was passed to the exception - it's returned directly. + ) -> WSGIResponse | SansIOResponse: + """Get a response object. - :param environ: the optional environ for the request. This - can be used to modify the response depending - on how the request looked like. - :return: a :class:`Response` object or a subclass thereof. + :param environ: A WSGI environ dict or request object. If given, may be + used to customize the response based on the request. + :param scope: An ASGI scope dict. If given, may be used to customize the + response based on the request. + :return: A WSGI :class:`werkzeug.wrappers.Response` if called without + arguments or with ``environ``. A sans-IO + :class:`werkzeug.sansio.Response` for ASGI if called with + ``scope``. 
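A small sketch of the get_response() behavior the overloads above document: calling it with no arguments (or with an environ) yields a full WSGI response, while the scope= overload is typed to return the sans-IO class for ASGI callers. The exception text here is illustrative:

from werkzeug.exceptions import NotFound

exc = NotFound("no such record")
resp = exc.get_response()            # a werkzeug.wrappers.Response
print(resp.status)                   # 404 NOT FOUND
print(resp.headers["Content-Type"])  # text/html; charset=utf-8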
""" - from .wrappers.response import Response as WSGIResponse # noqa: F811 + from .wrappers.response import Response if self.response is not None: return self.response if environ is not None: environ = _get_environ(environ) headers = self.get_headers(environ, scope) - return WSGIResponse(self.get_body(environ, scope), self.code, headers) + return Response(self.get_body(environ, scope), self.code, headers) def __call__( self, environ: WSGIEnvironment, start_response: StartResponse @@ -160,7 +175,7 @@ class HTTPException(Exception): :param start_response: the response callable provided by the WSGI server. """ - response = t.cast("WSGIResponse", self.get_response(environ)) + response = self.get_response(environ) return response(environ, start_response) def __str__(self) -> str: @@ -181,8 +196,7 @@ class BadRequest(HTTPException): code = 400 description = ( - "The browser (or proxy) sent a request that this server could " - "not understand." + "The browser (or proxy) sent a request that this server could not understand." ) @@ -205,13 +219,10 @@ class BadRequestKeyError(BadRequest, KeyError): else: KeyError.__init__(self, arg) - @property # type: ignore + @property def description(self) -> str: if self.show_exception: - return ( - f"{self._description}\n" - f"{KeyError.__name__}: {KeyError.__str__(self)}" - ) + return f"{self._description}\n{KeyError.__name__}: {KeyError.__str__(self)}" return self._description @@ -299,7 +310,7 @@ class Unauthorized(HTTPException): def __init__( self, description: str | None = None, - response: Response | None = None, + response: SansIOResponse | None = None, www_authenticate: None | (WWWAuthenticate | t.Iterable[WWWAuthenticate]) = None, ) -> None: super().__init__(description, response) @@ -368,7 +379,7 @@ class MethodNotAllowed(HTTPException): self, valid_methods: t.Iterable[str] | None = None, description: str | None = None, - response: Response | None = None, + response: SansIOResponse | None = None, ) -> None: """Takes an optional list of valid http methods starting with werkzeug 0.3 the list will be mandatory.""" @@ -526,7 +537,7 @@ class RequestedRangeNotSatisfiable(HTTPException): length: int | None = None, units: str = "bytes", description: str | None = None, - response: Response | None = None, + response: SansIOResponse | None = None, ) -> None: """Takes an optional `Content-Range` header value based on ``length`` parameter. @@ -651,7 +662,7 @@ class _RetryAfter(HTTPException): def __init__( self, description: str | None = None, - response: Response | None = None, + response: SansIOResponse | None = None, retry_after: datetime | int | None = None, ) -> None: super().__init__(description, response) @@ -741,7 +752,7 @@ class InternalServerError(HTTPException): def __init__( self, description: str | None = None, - response: Response | None = None, + response: SansIOResponse | None = None, original_exception: BaseException | None = None, ) -> None: #: The original exception that caused this 500 error. 
Can be @@ -863,7 +874,7 @@ class Aborter: self.mapping.update(extra) def __call__( - self, code: int | Response, *args: t.Any, **kwargs: t.Any + self, code: int | SansIOResponse, *args: t.Any, **kwargs: t.Any ) -> t.NoReturn: from .sansio.response import Response @@ -876,7 +887,7 @@ class Aborter: raise self.mapping[code](*args, **kwargs) -def abort(status: int | Response, *args: t.Any, **kwargs: t.Any) -> t.NoReturn: +def abort(status: int | SansIOResponse, *args: t.Any, **kwargs: t.Any) -> t.NoReturn: """Raises an :py:exc:`HTTPException` for the given status code or WSGI application. diff --git a/venv/lib/python3.12/site-packages/werkzeug/http.py b/venv/lib/python3.12/site-packages/werkzeug/http.py index f1dbb850..aee4f59a 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/http.py +++ b/venv/lib/python3.12/site-packages/werkzeug/http.py @@ -1401,5 +1401,5 @@ def is_byte_range_valid( # circular dependencies -from . import datastructures as ds -from .sansio import http as _sansio_http +from . import datastructures as ds # noqa: E402 +from .sansio import http as _sansio_http # noqa: E402 diff --git a/venv/lib/python3.12/site-packages/werkzeug/local.py b/venv/lib/python3.12/site-packages/werkzeug/local.py index 302589bb..32c1aca8 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/local.py +++ b/venv/lib/python3.12/site-packages/werkzeug/local.py @@ -531,7 +531,7 @@ class LocalProxy(t.Generic[T]): object.__setattr__(self, "_LocalProxy__wrapped", local) object.__setattr__(self, "_get_current_object", _get_current_object) - __doc__ = _ProxyLookup( # type: ignore[assignment] + __doc__ = _ProxyLookup( class_value=__doc__, fallback=lambda self: type(self).__doc__, is_attr=True ) __wrapped__ = _ProxyLookup( @@ -539,25 +539,25 @@ class LocalProxy(t.Generic[T]): is_attr=True, ) # __del__ should only delete the proxy - __repr__ = _ProxyLookup( # type: ignore[assignment] + __repr__ = _ProxyLookup( repr, fallback=lambda self: f"<{type(self).__name__} unbound>" ) - __str__ = _ProxyLookup(str) # type: ignore[assignment] + __str__ = _ProxyLookup(str) __bytes__ = _ProxyLookup(bytes) - __format__ = _ProxyLookup() # type: ignore[assignment] + __format__ = _ProxyLookup() __lt__ = _ProxyLookup(operator.lt) __le__ = _ProxyLookup(operator.le) - __eq__ = _ProxyLookup(operator.eq) # type: ignore[assignment] - __ne__ = _ProxyLookup(operator.ne) # type: ignore[assignment] + __eq__ = _ProxyLookup(operator.eq) + __ne__ = _ProxyLookup(operator.ne) __gt__ = _ProxyLookup(operator.gt) __ge__ = _ProxyLookup(operator.ge) - __hash__ = _ProxyLookup(hash) # type: ignore[assignment] + __hash__ = _ProxyLookup(hash) __bool__ = _ProxyLookup(bool, fallback=lambda self: False) __getattr__ = _ProxyLookup(getattr) # __getattribute__ triggered through __getattr__ - __setattr__ = _ProxyLookup(setattr) # type: ignore[assignment] - __delattr__ = _ProxyLookup(delattr) # type: ignore[assignment] - __dir__ = _ProxyLookup(dir, fallback=lambda self: []) # type: ignore[assignment] + __setattr__ = _ProxyLookup(setattr) + __delattr__ = _ProxyLookup(delattr) + __dir__ = _ProxyLookup(dir, fallback=lambda self: []) # __get__ (proxying descriptor not supported) # __set__ (descriptor) # __delete__ (descriptor) @@ -568,7 +568,7 @@ class LocalProxy(t.Generic[T]): # __weakref__ (__getattr__) # __init_subclass__ (proxying metaclass not supported) # __prepare__ (metaclass) - __class__ = _ProxyLookup(fallback=lambda self: type(self), is_attr=True) # type: ignore[assignment] + __class__ = _ProxyLookup(fallback=lambda self: type(self), is_attr=True) 
__instancecheck__ = _ProxyLookup(lambda self, other: isinstance(other, self)) __subclasscheck__ = _ProxyLookup(lambda self, other: issubclass(other, self)) # __class_getitem__ triggered through __getitem__ diff --git a/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/__init__.cpython-312.pyc index cbad5bb6..174233c0 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/__init__.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/dispatcher.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/dispatcher.cpython-312.pyc index 610fecb9..53411a58 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/dispatcher.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/dispatcher.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/http_proxy.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/http_proxy.cpython-312.pyc index 2fabaeea..012c838b 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/http_proxy.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/http_proxy.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/lint.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/lint.cpython-312.pyc index 67f7314e..a8ae4db7 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/lint.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/lint.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/profiler.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/profiler.cpython-312.pyc index 34284bc6..1950d587 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/profiler.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/profiler.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/proxy_fix.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/proxy_fix.cpython-312.pyc index 2b699781..6d134e3b 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/proxy_fix.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/proxy_fix.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/shared_data.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/shared_data.cpython-312.pyc index 2faf6618..a546955d 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/shared_data.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/middleware/__pycache__/shared_data.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/__init__.cpython-312.pyc index fdd776db..b5bfda5d 100644 Binary files 
a/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/__init__.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/converters.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/converters.cpython-312.pyc index c6f0d4f1..96aaf427 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/converters.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/converters.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/exceptions.cpython-312.pyc index 8cc9b304..94f8be76 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/exceptions.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/map.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/map.cpython-312.pyc index 143f6d31..4cfc0596 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/map.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/map.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/matcher.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/matcher.cpython-312.pyc index 402e0a46..7433d351 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/matcher.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/matcher.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/rules.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/rules.cpython-312.pyc index 42f5d3c0..42719651 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/rules.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/routing/__pycache__/rules.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/routing/rules.py b/venv/lib/python3.12/site-packages/werkzeug/routing/rules.py index 2dad31dd..9e483a54 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/routing/rules.py +++ b/venv/lib/python3.12/site-packages/werkzeug/routing/rules.py @@ -118,7 +118,7 @@ def parse_converter_args(argstr: str) -> tuple[tuple[t.Any, ...], dict[str, t.An for item in _converter_args_re.finditer(argstr): if item.start() != position: raise ValueError( - f"Cannot parse converter argument '{argstr[position:item.start()]}'" + f"Cannot parse converter argument '{argstr[position : item.start()]}'" ) value = item.group("stringval") @@ -776,7 +776,7 @@ class Rule(RuleFactory): ret = [parts[0]] for p in parts[1:]: if isinstance(p, ast.Constant) and isinstance(ret[-1], ast.Constant): - ret[-1] = ast.Constant(ret[-1].value + p.value) + ret[-1] = ast.Constant(ret[-1].value + p.value) # type: ignore[operator] else: ret.append(p) return ret diff --git a/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/__init__.cpython-312.pyc index fdb728a9..8bd259cf 100644 Binary files 
a/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/__init__.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/http.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/http.cpython-312.pyc index c1f2acc3..cb916dd1 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/http.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/http.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/multipart.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/multipart.cpython-312.pyc index a17c56e0..667ff569 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/multipart.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/multipart.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/request.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/request.cpython-312.pyc index 07d1221d..c3c31149 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/request.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/request.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/response.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/response.cpython-312.pyc index bc9fa61f..5165a4b6 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/response.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/response.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/utils.cpython-312.pyc index 9719d0d5..4da41353 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/utils.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/sansio/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/sansio/http.py b/venv/lib/python3.12/site-packages/werkzeug/sansio/http.py index f02d7fd5..c68a57ff 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/sansio/http.py +++ b/venv/lib/python3.12/site-packages/werkzeug/sansio/http.py @@ -167,4 +167,4 @@ def parse_cookie( # circular dependencies -from .. import datastructures as ds +from .. import datastructures as ds # noqa: E402 diff --git a/venv/lib/python3.12/site-packages/werkzeug/sansio/multipart.py b/venv/lib/python3.12/site-packages/werkzeug/sansio/multipart.py index 731be033..279b25e7 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/sansio/multipart.py +++ b/venv/lib/python3.12/site-packages/werkzeug/sansio/multipart.py @@ -79,6 +79,9 @@ class MultipartDecoder: The part data is returned as available to allow the caller to save the data from memory to disk, if desired. + + .. versionchanged:: 3.1.4 + Handle chunks that split a ``\r\n`` sequence. 
""" def __init__( @@ -121,7 +124,7 @@ class MultipartDecoder: self._search_position = 0 self._parts_decoded = 0 - def last_newline(self, data: bytes) -> int: + def last_newline(self, data: bytes | bytearray) -> int: try: last_nl = data.rindex(b"\n") except ValueError: @@ -232,7 +235,7 @@ class MultipartDecoder: return event - def _parse_headers(self, data: bytes) -> Headers: + def _parse_headers(self, data: bytes | bytearray) -> Headers: headers: list[tuple[str, str]] = [] # Merge the continued headers into one line data = HEADER_CONTINUATION_RE.sub(b" ", data) @@ -245,7 +248,9 @@ class MultipartDecoder: headers.append((name.strip(), value.strip())) return Headers(headers) - def _parse_data(self, data: bytes, *, start: bool) -> tuple[bytes, int, bool]: + def _parse_data( + self, data: bytes | bytearray, *, start: bool + ) -> tuple[bytes, int, bool]: # Body parts must start with CRLF (or CR or LF) if start: match = LINE_BREAK_RE.match(data) @@ -281,6 +286,11 @@ class MultipartDecoder: data_end = del_index = self.last_newline(data[data_start:]) + data_start more_data = match is None + # Keep \r\n sequence intact rather than splitting across chunks. + if data_end > data_start and data[data_end - 1] == 0x0D: + data_end -= 1 + del_index -= 1 + return bytes(data[data_start:data_end]), del_index, more_data diff --git a/venv/lib/python3.12/site-packages/werkzeug/sansio/response.py b/venv/lib/python3.12/site-packages/werkzeug/sansio/response.py index 9fed0862..d7efbe47 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/sansio/response.py +++ b/venv/lib/python3.12/site-packages/werkzeug/sansio/response.py @@ -134,7 +134,7 @@ class Response: self.headers["Content-Type"] = content_type if status is None: status = self.default_status - self.status = status # type: ignore + self.status = status def __repr__(self) -> str: return f"<{type(self).__name__} [{self.status}]>" @@ -146,7 +146,7 @@ class Response: @status_code.setter def status_code(self, code: int) -> None: - self.status = code # type: ignore + self.status = code @property def status(self) -> str: diff --git a/venv/lib/python3.12/site-packages/werkzeug/security.py b/venv/lib/python3.12/site-packages/werkzeug/security.py index 3f49ad1b..9c0b7d53 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/security.py +++ b/venv/lib/python3.12/site-packages/werkzeug/security.py @@ -12,6 +12,14 @@ DEFAULT_PBKDF2_ITERATIONS = 1_000_000 _os_alt_seps: list[str] = list( sep for sep in [os.sep, os.path.altsep] if sep is not None and sep != "/" ) +_windows_device_files = { + "CON", + "PRN", + "AUX", + "NUL", + *(f"COM{i}" for i in range(10)), + *(f"LPT{i}" for i in range(10)), +} def gen_salt(length: int) -> str: @@ -139,6 +147,9 @@ def safe_join(directory: str, *pathnames: str) -> str | None: :param pathnames: The untrusted path components relative to the base directory. :return: A safe path, otherwise ``None``. + + .. versionchanged:: 3.1.4 + Special device names are disallowed on Windows. 
""" if not directory: # Ensure we end up with ./path if directory="" is given, @@ -153,6 +164,10 @@ def safe_join(directory: str, *pathnames: str) -> str | None: if ( any(sep in filename for sep in _os_alt_seps) + or ( + os.name == "nt" + and os.path.splitext(filename)[0].upper() in _windows_device_files + ) or os.path.isabs(filename) # ntpath.isabs doesn't catch this on Python < 3.11 or filename.startswith("/") diff --git a/venv/lib/python3.12/site-packages/werkzeug/serving.py b/venv/lib/python3.12/site-packages/werkzeug/serving.py index ec166408..8605cfd6 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/serving.py +++ b/venv/lib/python3.12/site-packages/werkzeug/serving.py @@ -774,8 +774,8 @@ class BaseWSGIServer(HTTPServer): if sys.platform == "darwin" and port == 5000: print( - "On macOS, try disabling the 'AirPlay Receiver' service" - " from System Preferences -> General -> AirDrop & Handoff.", + "On macOS, try searching for and disabling" + " 'AirPlay Receiver' in System Settings.", file=sys.stderr, ) diff --git a/venv/lib/python3.12/site-packages/werkzeug/test.py b/venv/lib/python3.12/site-packages/werkzeug/test.py index 5c3c6088..58b4ce7f 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/test.py +++ b/venv/lib/python3.12/site-packages/werkzeug/test.py @@ -326,7 +326,7 @@ class EnvironBuilder: self.request_uri = path if base_url is not None: base_url = iri_to_uri(base_url) - self.base_url = base_url # type: ignore + self.base_url = base_url if isinstance(query_string, str): self.query_string = query_string else: diff --git a/venv/lib/python3.12/site-packages/werkzeug/utils.py b/venv/lib/python3.12/site-packages/werkzeug/utils.py index 3d3bbf06..0d4a700f 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/utils.py +++ b/venv/lib/python3.12/site-packages/werkzeug/utils.py @@ -21,6 +21,7 @@ from ._internal import _TAccessorValue from .datastructures import Headers from .exceptions import NotFound from .exceptions import RequestedRangeNotSatisfiable +from .security import _windows_device_files from .security import safe_join from .wsgi import wrap_file @@ -34,14 +35,6 @@ _T = t.TypeVar("_T") _entity_re = re.compile(r"&([^;]+);") _filename_ascii_strip_re = re.compile(r"[^A-Za-z0-9_.-]") -_windows_device_files = { - "CON", - "PRN", - "AUX", - "NUL", - *(f"COM{i}" for i in range(10)), - *(f"LPT{i}" for i in range(10)), -} class cached_property(property, t.Generic[_T]): diff --git a/venv/lib/python3.12/site-packages/werkzeug/wrappers/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/wrappers/__pycache__/__init__.cpython-312.pyc index fb68a5aa..a37fefaf 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/wrappers/__pycache__/__init__.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/wrappers/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/wrappers/__pycache__/request.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/wrappers/__pycache__/request.cpython-312.pyc index 386fddaf..f1179106 100644 Binary files a/venv/lib/python3.12/site-packages/werkzeug/wrappers/__pycache__/request.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/wrappers/__pycache__/request.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/wrappers/__pycache__/response.cpython-312.pyc b/venv/lib/python3.12/site-packages/werkzeug/wrappers/__pycache__/response.cpython-312.pyc index 8f8e9e00..96e94c4f 100644 Binary files 
a/venv/lib/python3.12/site-packages/werkzeug/wrappers/__pycache__/response.cpython-312.pyc and b/venv/lib/python3.12/site-packages/werkzeug/wrappers/__pycache__/response.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/werkzeug/wrappers/request.py b/venv/lib/python3.12/site-packages/werkzeug/wrappers/request.py index 719a3bc0..9f1eee11 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/wrappers/request.py +++ b/venv/lib/python3.12/site-packages/werkzeug/wrappers/request.py @@ -499,7 +499,7 @@ class Request(_SansIORequest): @property def script_root(self) -> str: - """Alias for :attr:`self.root_path`. ``environ["SCRIPT_ROOT"]`` + """Alias for :attr:`self.root_path`. ``environ["SCRIPT_NAME"]`` without a trailing slash. """ return self.root_path @@ -542,7 +542,7 @@ class Request(_SansIORequest): json_module = json @property - def json(self) -> t.Any | None: + def json(self) -> t.Any: """The parsed JSON data if :attr:`mimetype` indicates JSON (:mimetype:`application/json`, see :attr:`is_json`). diff --git a/venv/lib/python3.12/site-packages/werkzeug/wrappers/response.py b/venv/lib/python3.12/site-packages/werkzeug/wrappers/response.py index 7f01287c..e05fa8a1 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/wrappers/response.py +++ b/venv/lib/python3.12/site-packages/werkzeug/wrappers/response.py @@ -697,7 +697,7 @@ class Response(_SansIOResponse): content_length = range_tuple[1] - range_tuple[0] self.headers["Content-Length"] = str(content_length) self.headers["Accept-Ranges"] = accept_ranges - self.content_range = content_range_header # type: ignore + self.content_range = content_range_header self.status_code = 206 self._wrap_range_response(range_tuple[0], content_length) return True diff --git a/venv/lib/python3.12/site-packages/werkzeug/wsgi.py b/venv/lib/python3.12/site-packages/werkzeug/wsgi.py index 01d40af2..2d071c78 100644 --- a/venv/lib/python3.12/site-packages/werkzeug/wsgi.py +++ b/venv/lib/python3.12/site-packages/werkzeug/wsgi.py @@ -75,7 +75,7 @@ def _get_server( return None try: - port: int | None = int(environ.get("SERVER_PORT", None)) + port: int | None = int(environ.get("SERVER_PORT", None)) # type: ignore[arg-type] except (TypeError, ValueError): # unix socket port = None diff --git a/venv/lib/python3.12/site-packages/yaml/__init__.py b/venv/lib/python3.12/site-packages/yaml/__init__.py new file mode 100644 index 00000000..d58f0891 --- /dev/null +++ b/venv/lib/python3.12/site-packages/yaml/__init__.py @@ -0,0 +1,390 @@ + +from .error import * + +from .tokens import * +from .events import * +from .nodes import * + +from .loader import * +from .dumper import * + +__version__ = '6.0.3' +try: + from .cyaml import * + __with_libyaml__ = True +except ImportError: + __with_libyaml__ = False + +import io + +#------------------------------------------------------------------------------ +# XXX "Warnings control" is now deprecated. Leaving in the API function to not +# break code that uses it. +#------------------------------------------------------------------------------ +def warnings(settings=None): + if settings is None: + return {} + +#------------------------------------------------------------------------------ +def scan(stream, Loader=Loader): + """ + Scan a YAML stream and produce scanning tokens. + """ + loader = Loader(stream) + try: + while loader.check_token(): + yield loader.get_token() + finally: + loader.dispose() + +def parse(stream, Loader=Loader): + """ + Parse a YAML stream and produce parsing events. 
+ """ + loader = Loader(stream) + try: + while loader.check_event(): + yield loader.get_event() + finally: + loader.dispose() + +def compose(stream, Loader=Loader): + """ + Parse the first YAML document in a stream + and produce the corresponding representation tree. + """ + loader = Loader(stream) + try: + return loader.get_single_node() + finally: + loader.dispose() + +def compose_all(stream, Loader=Loader): + """ + Parse all YAML documents in a stream + and produce corresponding representation trees. + """ + loader = Loader(stream) + try: + while loader.check_node(): + yield loader.get_node() + finally: + loader.dispose() + +def load(stream, Loader): + """ + Parse the first YAML document in a stream + and produce the corresponding Python object. + """ + loader = Loader(stream) + try: + return loader.get_single_data() + finally: + loader.dispose() + +def load_all(stream, Loader): + """ + Parse all YAML documents in a stream + and produce corresponding Python objects. + """ + loader = Loader(stream) + try: + while loader.check_data(): + yield loader.get_data() + finally: + loader.dispose() + +def full_load(stream): + """ + Parse the first YAML document in a stream + and produce the corresponding Python object. + + Resolve all tags except those known to be + unsafe on untrusted input. + """ + return load(stream, FullLoader) + +def full_load_all(stream): + """ + Parse all YAML documents in a stream + and produce corresponding Python objects. + + Resolve all tags except those known to be + unsafe on untrusted input. + """ + return load_all(stream, FullLoader) + +def safe_load(stream): + """ + Parse the first YAML document in a stream + and produce the corresponding Python object. + + Resolve only basic YAML tags. This is known + to be safe for untrusted input. + """ + return load(stream, SafeLoader) + +def safe_load_all(stream): + """ + Parse all YAML documents in a stream + and produce corresponding Python objects. + + Resolve only basic YAML tags. This is known + to be safe for untrusted input. + """ + return load_all(stream, SafeLoader) + +def unsafe_load(stream): + """ + Parse the first YAML document in a stream + and produce the corresponding Python object. + + Resolve all tags, even those known to be + unsafe on untrusted input. + """ + return load(stream, UnsafeLoader) + +def unsafe_load_all(stream): + """ + Parse all YAML documents in a stream + and produce corresponding Python objects. + + Resolve all tags, even those known to be + unsafe on untrusted input. + """ + return load_all(stream, UnsafeLoader) + +def emit(events, stream=None, Dumper=Dumper, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None): + """ + Emit YAML parsing events into a stream. + If stream is None, return the produced string instead. + """ + getvalue = None + if stream is None: + stream = io.StringIO() + getvalue = stream.getvalue + dumper = Dumper(stream, canonical=canonical, indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break) + try: + for event in events: + dumper.emit(event) + finally: + dumper.dispose() + if getvalue: + return getvalue() + +def serialize_all(nodes, stream=None, Dumper=Dumper, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None): + """ + Serialize a sequence of representation trees into a YAML stream. + If stream is None, return the produced string instead. 
+ """ + getvalue = None + if stream is None: + if encoding is None: + stream = io.StringIO() + else: + stream = io.BytesIO() + getvalue = stream.getvalue + dumper = Dumper(stream, canonical=canonical, indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break, + encoding=encoding, version=version, tags=tags, + explicit_start=explicit_start, explicit_end=explicit_end) + try: + dumper.open() + for node in nodes: + dumper.serialize(node) + dumper.close() + finally: + dumper.dispose() + if getvalue: + return getvalue() + +def serialize(node, stream=None, Dumper=Dumper, **kwds): + """ + Serialize a representation tree into a YAML stream. + If stream is None, return the produced string instead. + """ + return serialize_all([node], stream, Dumper=Dumper, **kwds) + +def dump_all(documents, stream=None, Dumper=Dumper, + default_style=None, default_flow_style=False, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None, sort_keys=True): + """ + Serialize a sequence of Python objects into a YAML stream. + If stream is None, return the produced string instead. + """ + getvalue = None + if stream is None: + if encoding is None: + stream = io.StringIO() + else: + stream = io.BytesIO() + getvalue = stream.getvalue + dumper = Dumper(stream, default_style=default_style, + default_flow_style=default_flow_style, + canonical=canonical, indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break, + encoding=encoding, version=version, tags=tags, + explicit_start=explicit_start, explicit_end=explicit_end, sort_keys=sort_keys) + try: + dumper.open() + for data in documents: + dumper.represent(data) + dumper.close() + finally: + dumper.dispose() + if getvalue: + return getvalue() + +def dump(data, stream=None, Dumper=Dumper, **kwds): + """ + Serialize a Python object into a YAML stream. + If stream is None, return the produced string instead. + """ + return dump_all([data], stream, Dumper=Dumper, **kwds) + +def safe_dump_all(documents, stream=None, **kwds): + """ + Serialize a sequence of Python objects into a YAML stream. + Produce only basic YAML tags. + If stream is None, return the produced string instead. + """ + return dump_all(documents, stream, Dumper=SafeDumper, **kwds) + +def safe_dump(data, stream=None, **kwds): + """ + Serialize a Python object into a YAML stream. + Produce only basic YAML tags. + If stream is None, return the produced string instead. + """ + return dump_all([data], stream, Dumper=SafeDumper, **kwds) + +def add_implicit_resolver(tag, regexp, first=None, + Loader=None, Dumper=Dumper): + """ + Add an implicit scalar detector. + If an implicit scalar value matches the given regexp, + the corresponding tag is assigned to the scalar. + first is a sequence of possible initial characters or None. + """ + if Loader is None: + loader.Loader.add_implicit_resolver(tag, regexp, first) + loader.FullLoader.add_implicit_resolver(tag, regexp, first) + loader.UnsafeLoader.add_implicit_resolver(tag, regexp, first) + else: + Loader.add_implicit_resolver(tag, regexp, first) + Dumper.add_implicit_resolver(tag, regexp, first) + +def add_path_resolver(tag, path, kind=None, Loader=None, Dumper=Dumper): + """ + Add a path based resolver for the given tag. + A path is a list of keys that forms a path + to a node in the representation tree. + Keys can be string values, integers, or None. 
+ """ + if Loader is None: + loader.Loader.add_path_resolver(tag, path, kind) + loader.FullLoader.add_path_resolver(tag, path, kind) + loader.UnsafeLoader.add_path_resolver(tag, path, kind) + else: + Loader.add_path_resolver(tag, path, kind) + Dumper.add_path_resolver(tag, path, kind) + +def add_constructor(tag, constructor, Loader=None): + """ + Add a constructor for the given tag. + Constructor is a function that accepts a Loader instance + and a node object and produces the corresponding Python object. + """ + if Loader is None: + loader.Loader.add_constructor(tag, constructor) + loader.FullLoader.add_constructor(tag, constructor) + loader.UnsafeLoader.add_constructor(tag, constructor) + else: + Loader.add_constructor(tag, constructor) + +def add_multi_constructor(tag_prefix, multi_constructor, Loader=None): + """ + Add a multi-constructor for the given tag prefix. + Multi-constructor is called for a node if its tag starts with tag_prefix. + Multi-constructor accepts a Loader instance, a tag suffix, + and a node object and produces the corresponding Python object. + """ + if Loader is None: + loader.Loader.add_multi_constructor(tag_prefix, multi_constructor) + loader.FullLoader.add_multi_constructor(tag_prefix, multi_constructor) + loader.UnsafeLoader.add_multi_constructor(tag_prefix, multi_constructor) + else: + Loader.add_multi_constructor(tag_prefix, multi_constructor) + +def add_representer(data_type, representer, Dumper=Dumper): + """ + Add a representer for the given type. + Representer is a function accepting a Dumper instance + and an instance of the given data type + and producing the corresponding representation node. + """ + Dumper.add_representer(data_type, representer) + +def add_multi_representer(data_type, multi_representer, Dumper=Dumper): + """ + Add a representer for the given type. + Multi-representer is a function accepting a Dumper instance + and an instance of the given data type or subtype + and producing the corresponding representation node. + """ + Dumper.add_multi_representer(data_type, multi_representer) + +class YAMLObjectMetaclass(type): + """ + The metaclass for YAMLObject. + """ + def __init__(cls, name, bases, kwds): + super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds) + if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None: + if isinstance(cls.yaml_loader, list): + for loader in cls.yaml_loader: + loader.add_constructor(cls.yaml_tag, cls.from_yaml) + else: + cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml) + + cls.yaml_dumper.add_representer(cls, cls.to_yaml) + +class YAMLObject(metaclass=YAMLObjectMetaclass): + """ + An object that can dump itself to a YAML stream + and load itself from a YAML stream. + """ + + __slots__ = () # no direct instantiation, so allow immutable subclasses + + yaml_loader = [Loader, FullLoader, UnsafeLoader] + yaml_dumper = Dumper + + yaml_tag = None + yaml_flow_style = None + + @classmethod + def from_yaml(cls, loader, node): + """ + Convert a representation node to a Python object. + """ + return loader.construct_yaml_object(node, cls) + + @classmethod + def to_yaml(cls, dumper, data): + """ + Convert a Python object to a representation node. 
+ """ + return dumper.represent_yaml_object(cls.yaml_tag, data, cls, + flow_style=cls.yaml_flow_style) + diff --git a/venv/lib/python3.12/site-packages/yaml/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/yaml/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..211421a1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/yaml/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/yaml/__pycache__/composer.cpython-312.pyc b/venv/lib/python3.12/site-packages/yaml/__pycache__/composer.cpython-312.pyc new file mode 100644 index 00000000..1dd89882 Binary files /dev/null and b/venv/lib/python3.12/site-packages/yaml/__pycache__/composer.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/yaml/__pycache__/constructor.cpython-312.pyc b/venv/lib/python3.12/site-packages/yaml/__pycache__/constructor.cpython-312.pyc new file mode 100644 index 00000000..2cffa09b Binary files /dev/null and b/venv/lib/python3.12/site-packages/yaml/__pycache__/constructor.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/yaml/__pycache__/cyaml.cpython-312.pyc b/venv/lib/python3.12/site-packages/yaml/__pycache__/cyaml.cpython-312.pyc new file mode 100644 index 00000000..7e78d0ab Binary files /dev/null and b/venv/lib/python3.12/site-packages/yaml/__pycache__/cyaml.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/yaml/__pycache__/dumper.cpython-312.pyc b/venv/lib/python3.12/site-packages/yaml/__pycache__/dumper.cpython-312.pyc new file mode 100644 index 00000000..b831bc33 Binary files /dev/null and b/venv/lib/python3.12/site-packages/yaml/__pycache__/dumper.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/yaml/__pycache__/emitter.cpython-312.pyc b/venv/lib/python3.12/site-packages/yaml/__pycache__/emitter.cpython-312.pyc new file mode 100644 index 00000000..bd978e7a Binary files /dev/null and b/venv/lib/python3.12/site-packages/yaml/__pycache__/emitter.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/yaml/__pycache__/error.cpython-312.pyc b/venv/lib/python3.12/site-packages/yaml/__pycache__/error.cpython-312.pyc new file mode 100644 index 00000000..5df94886 Binary files /dev/null and b/venv/lib/python3.12/site-packages/yaml/__pycache__/error.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/yaml/__pycache__/events.cpython-312.pyc b/venv/lib/python3.12/site-packages/yaml/__pycache__/events.cpython-312.pyc new file mode 100644 index 00000000..6061460b Binary files /dev/null and b/venv/lib/python3.12/site-packages/yaml/__pycache__/events.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/yaml/__pycache__/loader.cpython-312.pyc b/venv/lib/python3.12/site-packages/yaml/__pycache__/loader.cpython-312.pyc new file mode 100644 index 00000000..527e4d4b Binary files /dev/null and b/venv/lib/python3.12/site-packages/yaml/__pycache__/loader.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/yaml/__pycache__/nodes.cpython-312.pyc b/venv/lib/python3.12/site-packages/yaml/__pycache__/nodes.cpython-312.pyc new file mode 100644 index 00000000..9adacc43 Binary files /dev/null and b/venv/lib/python3.12/site-packages/yaml/__pycache__/nodes.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/yaml/__pycache__/parser.cpython-312.pyc b/venv/lib/python3.12/site-packages/yaml/__pycache__/parser.cpython-312.pyc new file mode 100644 index 00000000..705851d6 Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/yaml/__pycache__/parser.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/yaml/__pycache__/reader.cpython-312.pyc b/venv/lib/python3.12/site-packages/yaml/__pycache__/reader.cpython-312.pyc new file mode 100644 index 00000000..3423640d Binary files /dev/null and b/venv/lib/python3.12/site-packages/yaml/__pycache__/reader.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/yaml/__pycache__/representer.cpython-312.pyc b/venv/lib/python3.12/site-packages/yaml/__pycache__/representer.cpython-312.pyc new file mode 100644 index 00000000..377b1d6c Binary files /dev/null and b/venv/lib/python3.12/site-packages/yaml/__pycache__/representer.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/yaml/__pycache__/resolver.cpython-312.pyc b/venv/lib/python3.12/site-packages/yaml/__pycache__/resolver.cpython-312.pyc new file mode 100644 index 00000000..36671990 Binary files /dev/null and b/venv/lib/python3.12/site-packages/yaml/__pycache__/resolver.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/yaml/__pycache__/scanner.cpython-312.pyc b/venv/lib/python3.12/site-packages/yaml/__pycache__/scanner.cpython-312.pyc new file mode 100644 index 00000000..1d37a566 Binary files /dev/null and b/venv/lib/python3.12/site-packages/yaml/__pycache__/scanner.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/yaml/__pycache__/serializer.cpython-312.pyc b/venv/lib/python3.12/site-packages/yaml/__pycache__/serializer.cpython-312.pyc new file mode 100644 index 00000000..40b1f529 Binary files /dev/null and b/venv/lib/python3.12/site-packages/yaml/__pycache__/serializer.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/yaml/__pycache__/tokens.cpython-312.pyc b/venv/lib/python3.12/site-packages/yaml/__pycache__/tokens.cpython-312.pyc new file mode 100644 index 00000000..a85ac9b9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/yaml/__pycache__/tokens.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/yaml/_yaml.cpython-312-x86_64-linux-gnu.so b/venv/lib/python3.12/site-packages/yaml/_yaml.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 00000000..3673689c Binary files /dev/null and b/venv/lib/python3.12/site-packages/yaml/_yaml.cpython-312-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.12/site-packages/yaml/composer.py b/venv/lib/python3.12/site-packages/yaml/composer.py new file mode 100644 index 00000000..6d15cb40 --- /dev/null +++ b/venv/lib/python3.12/site-packages/yaml/composer.py @@ -0,0 +1,139 @@ + +__all__ = ['Composer', 'ComposerError'] + +from .error import MarkedYAMLError +from .events import * +from .nodes import * + +class ComposerError(MarkedYAMLError): + pass + +class Composer: + + def __init__(self): + self.anchors = {} + + def check_node(self): + # Drop the STREAM-START event. + if self.check_event(StreamStartEvent): + self.get_event() + + # If there are more documents available? + return not self.check_event(StreamEndEvent) + + def get_node(self): + # Get the root node of the next document. + if not self.check_event(StreamEndEvent): + return self.compose_document() + + def get_single_node(self): + # Drop the STREAM-START event. + self.get_event() + + # Compose a document if the stream is not empty. + document = None + if not self.check_event(StreamEndEvent): + document = self.compose_document() + + # Ensure that the stream contains no more documents. 
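+ # Anything other than STREAM-END here is a second document; single-document entry points such as load()/get_single_data() reject it just below.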
+ if not self.check_event(StreamEndEvent): + event = self.get_event() + raise ComposerError("expected a single document in the stream", + document.start_mark, "but found another document", + event.start_mark) + + # Drop the STREAM-END event. + self.get_event() + + return document + + def compose_document(self): + # Drop the DOCUMENT-START event. + self.get_event() + + # Compose the root node. + node = self.compose_node(None, None) + + # Drop the DOCUMENT-END event. + self.get_event() + + self.anchors = {} + return node + + def compose_node(self, parent, index): + if self.check_event(AliasEvent): + event = self.get_event() + anchor = event.anchor + if anchor not in self.anchors: + raise ComposerError(None, None, "found undefined alias %r" + % anchor, event.start_mark) + return self.anchors[anchor] + event = self.peek_event() + anchor = event.anchor + if anchor is not None: + if anchor in self.anchors: + raise ComposerError("found duplicate anchor %r; first occurrence" + % anchor, self.anchors[anchor].start_mark, + "second occurrence", event.start_mark) + self.descend_resolver(parent, index) + if self.check_event(ScalarEvent): + node = self.compose_scalar_node(anchor) + elif self.check_event(SequenceStartEvent): + node = self.compose_sequence_node(anchor) + elif self.check_event(MappingStartEvent): + node = self.compose_mapping_node(anchor) + self.ascend_resolver() + return node + + def compose_scalar_node(self, anchor): + event = self.get_event() + tag = event.tag + if tag is None or tag == '!': + tag = self.resolve(ScalarNode, event.value, event.implicit) + node = ScalarNode(tag, event.value, + event.start_mark, event.end_mark, style=event.style) + if anchor is not None: + self.anchors[anchor] = node + return node + + def compose_sequence_node(self, anchor): + start_event = self.get_event() + tag = start_event.tag + if tag is None or tag == '!': + tag = self.resolve(SequenceNode, None, start_event.implicit) + node = SequenceNode(tag, [], + start_event.start_mark, None, + flow_style=start_event.flow_style) + if anchor is not None: + self.anchors[anchor] = node + index = 0 + while not self.check_event(SequenceEndEvent): + node.value.append(self.compose_node(node, index)) + index += 1 + end_event = self.get_event() + node.end_mark = end_event.end_mark + return node + + def compose_mapping_node(self, anchor): + start_event = self.get_event() + tag = start_event.tag + if tag is None or tag == '!': + tag = self.resolve(MappingNode, None, start_event.implicit) + node = MappingNode(tag, [], + start_event.start_mark, None, + flow_style=start_event.flow_style) + if anchor is not None: + self.anchors[anchor] = node + while not self.check_event(MappingEndEvent): + #key_event = self.peek_event() + item_key = self.compose_node(node, None) + #if item_key in node.value: + # raise ComposerError("while composing a mapping", start_event.start_mark, + # "found duplicate key", key_event.start_mark) + item_value = self.compose_node(node, item_key) + #node.value[item_key] = item_value + node.value.append((item_key, item_value)) + end_event = self.get_event() + node.end_mark = end_event.end_mark + return node + diff --git a/venv/lib/python3.12/site-packages/yaml/constructor.py b/venv/lib/python3.12/site-packages/yaml/constructor.py new file mode 100644 index 00000000..619acd30 --- /dev/null +++ b/venv/lib/python3.12/site-packages/yaml/constructor.py @@ -0,0 +1,748 @@ + +__all__ = [ + 'BaseConstructor', + 'SafeConstructor', + 'FullConstructor', + 'UnsafeConstructor', + 'Constructor', + 'ConstructorError' +] + 
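+# The classes below form a ladder of trust: BaseConstructor resolves no tags by itself, SafeConstructor handles only standard YAML tags, FullConstructor adds most python/* tags while blacklisting dangerous state keys, and UnsafeConstructor (with its Constructor alias) resolves everything, including python/object/apply.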
+from .error import * +from .nodes import * + +import collections.abc, datetime, base64, binascii, re, sys, types + +class ConstructorError(MarkedYAMLError): + pass + +class BaseConstructor: + + yaml_constructors = {} + yaml_multi_constructors = {} + + def __init__(self): + self.constructed_objects = {} + self.recursive_objects = {} + self.state_generators = [] + self.deep_construct = False + + def check_data(self): + # If there are more documents available? + return self.check_node() + + def check_state_key(self, key): + """Block special attributes/methods from being set in a newly created + object, to prevent user-controlled methods from being called during + deserialization""" + if self.get_state_keys_blacklist_regexp().match(key): + raise ConstructorError(None, None, + "blacklisted key '%s' in instance state found" % (key,), None) + + def get_data(self): + # Construct and return the next document. + if self.check_node(): + return self.construct_document(self.get_node()) + + def get_single_data(self): + # Ensure that the stream contains a single document and construct it. + node = self.get_single_node() + if node is not None: + return self.construct_document(node) + return None + + def construct_document(self, node): + data = self.construct_object(node) + while self.state_generators: + state_generators = self.state_generators + self.state_generators = [] + for generator in state_generators: + for dummy in generator: + pass + self.constructed_objects = {} + self.recursive_objects = {} + self.deep_construct = False + return data + + def construct_object(self, node, deep=False): + if node in self.constructed_objects: + return self.constructed_objects[node] + if deep: + old_deep = self.deep_construct + self.deep_construct = True + if node in self.recursive_objects: + raise ConstructorError(None, None, + "found unconstructable recursive node", node.start_mark) + self.recursive_objects[node] = None + constructor = None + tag_suffix = None + if node.tag in self.yaml_constructors: + constructor = self.yaml_constructors[node.tag] + else: + for tag_prefix in self.yaml_multi_constructors: + if tag_prefix is not None and node.tag.startswith(tag_prefix): + tag_suffix = node.tag[len(tag_prefix):] + constructor = self.yaml_multi_constructors[tag_prefix] + break + else: + if None in self.yaml_multi_constructors: + tag_suffix = node.tag + constructor = self.yaml_multi_constructors[None] + elif None in self.yaml_constructors: + constructor = self.yaml_constructors[None] + elif isinstance(node, ScalarNode): + constructor = self.__class__.construct_scalar + elif isinstance(node, SequenceNode): + constructor = self.__class__.construct_sequence + elif isinstance(node, MappingNode): + constructor = self.__class__.construct_mapping + if tag_suffix is None: + data = constructor(self, node) + else: + data = constructor(self, tag_suffix, node) + if isinstance(data, types.GeneratorType): + generator = data + data = next(generator) + if self.deep_construct: + for dummy in generator: + pass + else: + self.state_generators.append(generator) + self.constructed_objects[node] = data + del self.recursive_objects[node] + if deep: + self.deep_construct = old_deep + return data + + def construct_scalar(self, node): + if not isinstance(node, ScalarNode): + raise ConstructorError(None, None, + "expected a scalar node, but found %s" % node.id, + node.start_mark) + return node.value + + def construct_sequence(self, node, deep=False): + if not isinstance(node, SequenceNode): + raise ConstructorError(None, None, + "expected a 
sequence node, but found %s" % node.id, + node.start_mark) + return [self.construct_object(child, deep=deep) + for child in node.value] + + def construct_mapping(self, node, deep=False): + if not isinstance(node, MappingNode): + raise ConstructorError(None, None, + "expected a mapping node, but found %s" % node.id, + node.start_mark) + mapping = {} + for key_node, value_node in node.value: + key = self.construct_object(key_node, deep=deep) + if not isinstance(key, collections.abc.Hashable): + raise ConstructorError("while constructing a mapping", node.start_mark, + "found unhashable key", key_node.start_mark) + value = self.construct_object(value_node, deep=deep) + mapping[key] = value + return mapping + + def construct_pairs(self, node, deep=False): + if not isinstance(node, MappingNode): + raise ConstructorError(None, None, + "expected a mapping node, but found %s" % node.id, + node.start_mark) + pairs = [] + for key_node, value_node in node.value: + key = self.construct_object(key_node, deep=deep) + value = self.construct_object(value_node, deep=deep) + pairs.append((key, value)) + return pairs + + @classmethod + def add_constructor(cls, tag, constructor): + if not 'yaml_constructors' in cls.__dict__: + cls.yaml_constructors = cls.yaml_constructors.copy() + cls.yaml_constructors[tag] = constructor + + @classmethod + def add_multi_constructor(cls, tag_prefix, multi_constructor): + if not 'yaml_multi_constructors' in cls.__dict__: + cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy() + cls.yaml_multi_constructors[tag_prefix] = multi_constructor + +class SafeConstructor(BaseConstructor): + + def construct_scalar(self, node): + if isinstance(node, MappingNode): + for key_node, value_node in node.value: + if key_node.tag == 'tag:yaml.org,2002:value': + return self.construct_scalar(value_node) + return super().construct_scalar(node) + + def flatten_mapping(self, node): + merge = [] + index = 0 + while index < len(node.value): + key_node, value_node = node.value[index] + if key_node.tag == 'tag:yaml.org,2002:merge': + del node.value[index] + if isinstance(value_node, MappingNode): + self.flatten_mapping(value_node) + merge.extend(value_node.value) + elif isinstance(value_node, SequenceNode): + submerge = [] + for subnode in value_node.value: + if not isinstance(subnode, MappingNode): + raise ConstructorError("while constructing a mapping", + node.start_mark, + "expected a mapping for merging, but found %s" + % subnode.id, subnode.start_mark) + self.flatten_mapping(subnode) + submerge.append(subnode.value) + submerge.reverse() + for value in submerge: + merge.extend(value) + else: + raise ConstructorError("while constructing a mapping", node.start_mark, + "expected a mapping or list of mappings for merging, but found %s" + % value_node.id, value_node.start_mark) + elif key_node.tag == 'tag:yaml.org,2002:value': + key_node.tag = 'tag:yaml.org,2002:str' + index += 1 + else: + index += 1 + if merge: + node.value = merge + node.value + + def construct_mapping(self, node, deep=False): + if isinstance(node, MappingNode): + self.flatten_mapping(node) + return super().construct_mapping(node, deep=deep) + + def construct_yaml_null(self, node): + self.construct_scalar(node) + return None + + bool_values = { + 'yes': True, + 'no': False, + 'true': True, + 'false': False, + 'on': True, + 'off': False, + } + + def construct_yaml_bool(self, node): + value = self.construct_scalar(node) + return self.bool_values[value.lower()] + + def construct_yaml_int(self, node): + value = 
self.construct_scalar(node) + value = value.replace('_', '') + sign = +1 + if value[0] == '-': + sign = -1 + if value[0] in '+-': + value = value[1:] + if value == '0': + return 0 + elif value.startswith('0b'): + return sign*int(value[2:], 2) + elif value.startswith('0x'): + return sign*int(value[2:], 16) + elif value[0] == '0': + return sign*int(value, 8) + elif ':' in value: + digits = [int(part) for part in value.split(':')] + digits.reverse() + base = 1 + value = 0 + for digit in digits: + value += digit*base + base *= 60 + return sign*value + else: + return sign*int(value) + + inf_value = 1e300 + while inf_value != inf_value*inf_value: + inf_value *= inf_value + nan_value = -inf_value/inf_value # Trying to make a quiet NaN (like C99). + + def construct_yaml_float(self, node): + value = self.construct_scalar(node) + value = value.replace('_', '').lower() + sign = +1 + if value[0] == '-': + sign = -1 + if value[0] in '+-': + value = value[1:] + if value == '.inf': + return sign*self.inf_value + elif value == '.nan': + return self.nan_value + elif ':' in value: + digits = [float(part) for part in value.split(':')] + digits.reverse() + base = 1 + value = 0.0 + for digit in digits: + value += digit*base + base *= 60 + return sign*value + else: + return sign*float(value) + + def construct_yaml_binary(self, node): + try: + value = self.construct_scalar(node).encode('ascii') + except UnicodeEncodeError as exc: + raise ConstructorError(None, None, + "failed to convert base64 data into ascii: %s" % exc, + node.start_mark) + try: + if hasattr(base64, 'decodebytes'): + return base64.decodebytes(value) + else: + return base64.decodestring(value) + except binascii.Error as exc: + raise ConstructorError(None, None, + "failed to decode base64 data: %s" % exc, node.start_mark) + + timestamp_regexp = re.compile( + r'''^(?P<year>[0-9][0-9][0-9][0-9]) + -(?P<month>[0-9][0-9]?) + -(?P<day>[0-9][0-9]?) + (?:(?:[Tt]|[ \t]+) + (?P<hour>[0-9][0-9]?) + :(?P<minute>[0-9][0-9]) + :(?P<second>[0-9][0-9]) + (?:\.(?P<fraction>[0-9]*))? + (?:[ \t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?) + (?::(?P<tz_minute>[0-9][0-9]))?))?)?$''', re.X) + + def construct_yaml_timestamp(self, node): + value = self.construct_scalar(node) + match = self.timestamp_regexp.match(node.value) + values = match.groupdict() + year = int(values['year']) + month = int(values['month']) + day = int(values['day']) + if not values['hour']: + return datetime.date(year, month, day) + hour = int(values['hour']) + minute = int(values['minute']) + second = int(values['second']) + fraction = 0 + tzinfo = None + if values['fraction']: + fraction = values['fraction'][:6] + while len(fraction) < 6: + fraction += '0' + fraction = int(fraction) + if values['tz_sign']: + tz_hour = int(values['tz_hour']) + tz_minute = int(values['tz_minute'] or 0) + delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute) + if values['tz_sign'] == '-': + delta = -delta + tzinfo = datetime.timezone(delta) + elif values['tz']: + tzinfo = datetime.timezone.utc + return datetime.datetime(year, month, day, hour, minute, second, fraction, + tzinfo=tzinfo) + + def construct_yaml_omap(self, node): + # Note: we do not check for duplicate keys, because it's too + # CPU-expensive. 
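+ # The empty list is yielded before it is filled so that recursive aliases can refer to it; construct_document() later drains these generators to complete the object.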
+ omap = [] + yield omap + if not isinstance(node, SequenceNode): + raise ConstructorError("while constructing an ordered map", node.start_mark, + "expected a sequence, but found %s" % node.id, node.start_mark) + for subnode in node.value: + if not isinstance(subnode, MappingNode): + raise ConstructorError("while constructing an ordered map", node.start_mark, + "expected a mapping of length 1, but found %s" % subnode.id, + subnode.start_mark) + if len(subnode.value) != 1: + raise ConstructorError("while constructing an ordered map", node.start_mark, + "expected a single mapping item, but found %d items" % len(subnode.value), + subnode.start_mark) + key_node, value_node = subnode.value[0] + key = self.construct_object(key_node) + value = self.construct_object(value_node) + omap.append((key, value)) + + def construct_yaml_pairs(self, node): + # Note: the same code as `construct_yaml_omap`. + pairs = [] + yield pairs + if not isinstance(node, SequenceNode): + raise ConstructorError("while constructing pairs", node.start_mark, + "expected a sequence, but found %s" % node.id, node.start_mark) + for subnode in node.value: + if not isinstance(subnode, MappingNode): + raise ConstructorError("while constructing pairs", node.start_mark, + "expected a mapping of length 1, but found %s" % subnode.id, + subnode.start_mark) + if len(subnode.value) != 1: + raise ConstructorError("while constructing pairs", node.start_mark, + "expected a single mapping item, but found %d items" % len(subnode.value), + subnode.start_mark) + key_node, value_node = subnode.value[0] + key = self.construct_object(key_node) + value = self.construct_object(value_node) + pairs.append((key, value)) + + def construct_yaml_set(self, node): + data = set() + yield data + value = self.construct_mapping(node) + data.update(value) + + def construct_yaml_str(self, node): + return self.construct_scalar(node) + + def construct_yaml_seq(self, node): + data = [] + yield data + data.extend(self.construct_sequence(node)) + + def construct_yaml_map(self, node): + data = {} + yield data + value = self.construct_mapping(node) + data.update(value) + + def construct_yaml_object(self, node, cls): + data = cls.__new__(cls) + yield data + if hasattr(data, '__setstate__'): + state = self.construct_mapping(node, deep=True) + data.__setstate__(state) + else: + state = self.construct_mapping(node) + data.__dict__.update(state) + + def construct_undefined(self, node): + raise ConstructorError(None, None, + "could not determine a constructor for the tag %r" % node.tag, + node.start_mark) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:null', + SafeConstructor.construct_yaml_null) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:bool', + SafeConstructor.construct_yaml_bool) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:int', + SafeConstructor.construct_yaml_int) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:float', + SafeConstructor.construct_yaml_float) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:binary', + SafeConstructor.construct_yaml_binary) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:timestamp', + SafeConstructor.construct_yaml_timestamp) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:omap', + SafeConstructor.construct_yaml_omap) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:pairs', + SafeConstructor.construct_yaml_pairs) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:set', + SafeConstructor.construct_yaml_set) + +SafeConstructor.add_constructor( + 
'tag:yaml.org,2002:str', + SafeConstructor.construct_yaml_str) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:seq', + SafeConstructor.construct_yaml_seq) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:map', + SafeConstructor.construct_yaml_map) + +SafeConstructor.add_constructor(None, + SafeConstructor.construct_undefined) + +class FullConstructor(SafeConstructor): + # 'extend' is blacklisted because it is used by + # construct_python_object_apply to add `listitems` to a newly generate + # python instance + def get_state_keys_blacklist(self): + return ['^extend$', '^__.*__$'] + + def get_state_keys_blacklist_regexp(self): + if not hasattr(self, 'state_keys_blacklist_regexp'): + self.state_keys_blacklist_regexp = re.compile('(' + '|'.join(self.get_state_keys_blacklist()) + ')') + return self.state_keys_blacklist_regexp + + def construct_python_str(self, node): + return self.construct_scalar(node) + + def construct_python_unicode(self, node): + return self.construct_scalar(node) + + def construct_python_bytes(self, node): + try: + value = self.construct_scalar(node).encode('ascii') + except UnicodeEncodeError as exc: + raise ConstructorError(None, None, + "failed to convert base64 data into ascii: %s" % exc, + node.start_mark) + try: + if hasattr(base64, 'decodebytes'): + return base64.decodebytes(value) + else: + return base64.decodestring(value) + except binascii.Error as exc: + raise ConstructorError(None, None, + "failed to decode base64 data: %s" % exc, node.start_mark) + + def construct_python_long(self, node): + return self.construct_yaml_int(node) + + def construct_python_complex(self, node): + return complex(self.construct_scalar(node)) + + def construct_python_tuple(self, node): + return tuple(self.construct_sequence(node)) + + def find_python_module(self, name, mark, unsafe=False): + if not name: + raise ConstructorError("while constructing a Python module", mark, + "expected non-empty name appended to the tag", mark) + if unsafe: + try: + __import__(name) + except ImportError as exc: + raise ConstructorError("while constructing a Python module", mark, + "cannot find module %r (%s)" % (name, exc), mark) + if name not in sys.modules: + raise ConstructorError("while constructing a Python module", mark, + "module %r is not imported" % name, mark) + return sys.modules[name] + + def find_python_name(self, name, mark, unsafe=False): + if not name: + raise ConstructorError("while constructing a Python object", mark, + "expected non-empty name appended to the tag", mark) + if '.' 
in name: + module_name, object_name = name.rsplit('.', 1) + else: + module_name = 'builtins' + object_name = name + if unsafe: + try: + __import__(module_name) + except ImportError as exc: + raise ConstructorError("while constructing a Python object", mark, + "cannot find module %r (%s)" % (module_name, exc), mark) + if module_name not in sys.modules: + raise ConstructorError("while constructing a Python object", mark, + "module %r is not imported" % module_name, mark) + module = sys.modules[module_name] + if not hasattr(module, object_name): + raise ConstructorError("while constructing a Python object", mark, + "cannot find %r in the module %r" + % (object_name, module.__name__), mark) + return getattr(module, object_name) + + def construct_python_name(self, suffix, node): + value = self.construct_scalar(node) + if value: + raise ConstructorError("while constructing a Python name", node.start_mark, + "expected the empty value, but found %r" % value, node.start_mark) + return self.find_python_name(suffix, node.start_mark) + + def construct_python_module(self, suffix, node): + value = self.construct_scalar(node) + if value: + raise ConstructorError("while constructing a Python module", node.start_mark, + "expected the empty value, but found %r" % value, node.start_mark) + return self.find_python_module(suffix, node.start_mark) + + def make_python_instance(self, suffix, node, + args=None, kwds=None, newobj=False, unsafe=False): + if not args: + args = [] + if not kwds: + kwds = {} + cls = self.find_python_name(suffix, node.start_mark) + if not (unsafe or isinstance(cls, type)): + raise ConstructorError("while constructing a Python instance", node.start_mark, + "expected a class, but found %r" % type(cls), + node.start_mark) + if newobj and isinstance(cls, type): + return cls.__new__(cls, *args, **kwds) + else: + return cls(*args, **kwds) + + def set_python_instance_state(self, instance, state, unsafe=False): + if hasattr(instance, '__setstate__'): + instance.__setstate__(state) + else: + slotstate = {} + if isinstance(state, tuple) and len(state) == 2: + state, slotstate = state + if hasattr(instance, '__dict__'): + if not unsafe and state: + for key in state.keys(): + self.check_state_key(key) + instance.__dict__.update(state) + elif state: + slotstate.update(state) + for key, value in slotstate.items(): + if not unsafe: + self.check_state_key(key) + setattr(instance, key, value) + + def construct_python_object(self, suffix, node): + # Format: + # !!python/object:module.name { ... state ... } + instance = self.make_python_instance(suffix, node, newobj=True) + yield instance + deep = hasattr(instance, '__setstate__') + state = self.construct_mapping(node, deep=deep) + self.set_python_instance_state(instance, state) + + def construct_python_object_apply(self, suffix, node, newobj=False): + # Format: + # !!python/object/apply # (or !!python/object/new) + # args: [ ... arguments ... ] + # kwds: { ... keywords ... } + # state: ... state ... + # listitems: [ ... listitems ... ] + # dictitems: { ... dictitems ... } + # or short format: + # !!python/object/apply [ ... arguments ... ] + # The difference between !!python/object/apply and !!python/object/new + # is how an object is created, check make_python_instance for details. 
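+ # The sequence form carries positional arguments only; the mapping form may also supply kwds, state, listitems and dictitems, which are applied after the instance is created.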
+ if isinstance(node, SequenceNode): + args = self.construct_sequence(node, deep=True) + kwds = {} + state = {} + listitems = [] + dictitems = {} + else: + value = self.construct_mapping(node, deep=True) + args = value.get('args', []) + kwds = value.get('kwds', {}) + state = value.get('state', {}) + listitems = value.get('listitems', []) + dictitems = value.get('dictitems', {}) + instance = self.make_python_instance(suffix, node, args, kwds, newobj) + if state: + self.set_python_instance_state(instance, state) + if listitems: + instance.extend(listitems) + if dictitems: + for key in dictitems: + instance[key] = dictitems[key] + return instance + + def construct_python_object_new(self, suffix, node): + return self.construct_python_object_apply(suffix, node, newobj=True) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/none', + FullConstructor.construct_yaml_null) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/bool', + FullConstructor.construct_yaml_bool) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/str', + FullConstructor.construct_python_str) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/unicode', + FullConstructor.construct_python_unicode) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/bytes', + FullConstructor.construct_python_bytes) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/int', + FullConstructor.construct_yaml_int) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/long', + FullConstructor.construct_python_long) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/float', + FullConstructor.construct_yaml_float) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/complex', + FullConstructor.construct_python_complex) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/list', + FullConstructor.construct_yaml_seq) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/tuple', + FullConstructor.construct_python_tuple) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/dict', + FullConstructor.construct_yaml_map) + +FullConstructor.add_multi_constructor( + 'tag:yaml.org,2002:python/name:', + FullConstructor.construct_python_name) + +class UnsafeConstructor(FullConstructor): + + def find_python_module(self, name, mark): + return super(UnsafeConstructor, self).find_python_module(name, mark, unsafe=True) + + def find_python_name(self, name, mark): + return super(UnsafeConstructor, self).find_python_name(name, mark, unsafe=True) + + def make_python_instance(self, suffix, node, args=None, kwds=None, newobj=False): + return super(UnsafeConstructor, self).make_python_instance( + suffix, node, args, kwds, newobj, unsafe=True) + + def set_python_instance_state(self, instance, state): + return super(UnsafeConstructor, self).set_python_instance_state( + instance, state, unsafe=True) + +UnsafeConstructor.add_multi_constructor( + 'tag:yaml.org,2002:python/module:', + UnsafeConstructor.construct_python_module) + +UnsafeConstructor.add_multi_constructor( + 'tag:yaml.org,2002:python/object:', + UnsafeConstructor.construct_python_object) + +UnsafeConstructor.add_multi_constructor( + 'tag:yaml.org,2002:python/object/new:', + UnsafeConstructor.construct_python_object_new) + +UnsafeConstructor.add_multi_constructor( + 'tag:yaml.org,2002:python/object/apply:', + UnsafeConstructor.construct_python_object_apply) + +# Constructor is same as UnsafeConstructor. Need to leave this in place in case +# people have extended it directly. 
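+# For untrusted input, prefer the Safe variants: the python/* multi-constructors registered above can instantiate arbitrary objects and call arbitrary callables.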
+class Constructor(UnsafeConstructor): + pass diff --git a/venv/lib/python3.12/site-packages/yaml/cyaml.py b/venv/lib/python3.12/site-packages/yaml/cyaml.py new file mode 100644 index 00000000..0c213458 --- /dev/null +++ b/venv/lib/python3.12/site-packages/yaml/cyaml.py @@ -0,0 +1,101 @@ + +__all__ = [ + 'CBaseLoader', 'CSafeLoader', 'CFullLoader', 'CUnsafeLoader', 'CLoader', + 'CBaseDumper', 'CSafeDumper', 'CDumper' +] + +from yaml._yaml import CParser, CEmitter + +from .constructor import * + +from .serializer import * +from .representer import * + +from .resolver import * + +class CBaseLoader(CParser, BaseConstructor, BaseResolver): + + def __init__(self, stream): + CParser.__init__(self, stream) + BaseConstructor.__init__(self) + BaseResolver.__init__(self) + +class CSafeLoader(CParser, SafeConstructor, Resolver): + + def __init__(self, stream): + CParser.__init__(self, stream) + SafeConstructor.__init__(self) + Resolver.__init__(self) + +class CFullLoader(CParser, FullConstructor, Resolver): + + def __init__(self, stream): + CParser.__init__(self, stream) + FullConstructor.__init__(self) + Resolver.__init__(self) + +class CUnsafeLoader(CParser, UnsafeConstructor, Resolver): + + def __init__(self, stream): + CParser.__init__(self, stream) + UnsafeConstructor.__init__(self) + Resolver.__init__(self) + +class CLoader(CParser, Constructor, Resolver): + + def __init__(self, stream): + CParser.__init__(self, stream) + Constructor.__init__(self) + Resolver.__init__(self) + +class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver): + + def __init__(self, stream, + default_style=None, default_flow_style=False, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None, sort_keys=True): + CEmitter.__init__(self, stream, canonical=canonical, + indent=indent, width=width, encoding=encoding, + allow_unicode=allow_unicode, line_break=line_break, + explicit_start=explicit_start, explicit_end=explicit_end, + version=version, tags=tags) + Representer.__init__(self, default_style=default_style, + default_flow_style=default_flow_style, sort_keys=sort_keys) + Resolver.__init__(self) + +class CSafeDumper(CEmitter, SafeRepresenter, Resolver): + + def __init__(self, stream, + default_style=None, default_flow_style=False, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None, sort_keys=True): + CEmitter.__init__(self, stream, canonical=canonical, + indent=indent, width=width, encoding=encoding, + allow_unicode=allow_unicode, line_break=line_break, + explicit_start=explicit_start, explicit_end=explicit_end, + version=version, tags=tags) + SafeRepresenter.__init__(self, default_style=default_style, + default_flow_style=default_flow_style, sort_keys=sort_keys) + Resolver.__init__(self) + +class CDumper(CEmitter, Serializer, Representer, Resolver): + + def __init__(self, stream, + default_style=None, default_flow_style=False, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None, sort_keys=True): + CEmitter.__init__(self, stream, canonical=canonical, + indent=indent, width=width, encoding=encoding, + allow_unicode=allow_unicode, line_break=line_break, + explicit_start=explicit_start, explicit_end=explicit_end, + version=version, tags=tags) + Representer.__init__(self, 
default_style=default_style, + default_flow_style=default_flow_style, sort_keys=sort_keys) + Resolver.__init__(self) + diff --git a/venv/lib/python3.12/site-packages/yaml/dumper.py b/venv/lib/python3.12/site-packages/yaml/dumper.py new file mode 100644 index 00000000..6aadba55 --- /dev/null +++ b/venv/lib/python3.12/site-packages/yaml/dumper.py @@ -0,0 +1,62 @@ + +__all__ = ['BaseDumper', 'SafeDumper', 'Dumper'] + +from .emitter import * +from .serializer import * +from .representer import * +from .resolver import * + +class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver): + + def __init__(self, stream, + default_style=None, default_flow_style=False, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None, sort_keys=True): + Emitter.__init__(self, stream, canonical=canonical, + indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break) + Serializer.__init__(self, encoding=encoding, + explicit_start=explicit_start, explicit_end=explicit_end, + version=version, tags=tags) + Representer.__init__(self, default_style=default_style, + default_flow_style=default_flow_style, sort_keys=sort_keys) + Resolver.__init__(self) + +class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver): + + def __init__(self, stream, + default_style=None, default_flow_style=False, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None, sort_keys=True): + Emitter.__init__(self, stream, canonical=canonical, + indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break) + Serializer.__init__(self, encoding=encoding, + explicit_start=explicit_start, explicit_end=explicit_end, + version=version, tags=tags) + SafeRepresenter.__init__(self, default_style=default_style, + default_flow_style=default_flow_style, sort_keys=sort_keys) + Resolver.__init__(self) + +class Dumper(Emitter, Serializer, Representer, Resolver): + + def __init__(self, stream, + default_style=None, default_flow_style=False, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None, sort_keys=True): + Emitter.__init__(self, stream, canonical=canonical, + indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break) + Serializer.__init__(self, encoding=encoding, + explicit_start=explicit_start, explicit_end=explicit_end, + version=version, tags=tags) + Representer.__init__(self, default_style=default_style, + default_flow_style=default_flow_style, sort_keys=sort_keys) + Resolver.__init__(self) + diff --git a/venv/lib/python3.12/site-packages/yaml/emitter.py b/venv/lib/python3.12/site-packages/yaml/emitter.py new file mode 100644 index 00000000..a664d011 --- /dev/null +++ b/venv/lib/python3.12/site-packages/yaml/emitter.py @@ -0,0 +1,1137 @@ + +# Emitter expects events obeying the following grammar: +# stream ::= STREAM-START document* STREAM-END +# document ::= DOCUMENT-START node DOCUMENT-END +# node ::= SCALAR | sequence | mapping +# sequence ::= SEQUENCE-START node* SEQUENCE-END +# mapping ::= MAPPING-START (node node)* MAPPING-END + +__all__ = ['Emitter', 'EmitterError'] + +from .error import YAMLError +from .events import * + +class EmitterError(YAMLError): + pass + +class ScalarAnalysis: + def __init__(self, scalar, empty, multiline, + 
allow_flow_plain, allow_block_plain, + allow_single_quoted, allow_double_quoted, + allow_block): + self.scalar = scalar + self.empty = empty + self.multiline = multiline + self.allow_flow_plain = allow_flow_plain + self.allow_block_plain = allow_block_plain + self.allow_single_quoted = allow_single_quoted + self.allow_double_quoted = allow_double_quoted + self.allow_block = allow_block + +class Emitter: + + DEFAULT_TAG_PREFIXES = { + '!' : '!', + 'tag:yaml.org,2002:' : '!!', + } + + def __init__(self, stream, canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None): + + # The stream should have the methods `write` and possibly `flush`. + self.stream = stream + + # Encoding can be overridden by STREAM-START. + self.encoding = None + + # Emitter is a state machine with a stack of states to handle nested + # structures. + self.states = [] + self.state = self.expect_stream_start + + # Current event and the event queue. + self.events = [] + self.event = None + + # The current indentation level and the stack of previous indents. + self.indents = [] + self.indent = None + + # Flow level. + self.flow_level = 0 + + # Contexts. + self.root_context = False + self.sequence_context = False + self.mapping_context = False + self.simple_key_context = False + + # Characteristics of the last emitted character: + # - current position. + # - is it a whitespace? + # - is it an indention character + # (indentation space, '-', '?', or ':')? + self.line = 0 + self.column = 0 + self.whitespace = True + self.indention = True + + # Whether the document requires an explicit document indicator + self.open_ended = False + + # Formatting details. + self.canonical = canonical + self.allow_unicode = allow_unicode + self.best_indent = 2 + if indent and 1 < indent < 10: + self.best_indent = indent + self.best_width = 80 + if width and width > self.best_indent*2: + self.best_width = width + self.best_line_break = '\n' + if line_break in ['\r', '\n', '\r\n']: + self.best_line_break = line_break + + # Tag prefixes. + self.tag_prefixes = None + + # Prepared anchor and tag. + self.prepared_anchor = None + self.prepared_tag = None + + # Scalar analysis and style. + self.analysis = None + self.style = None + + def dispose(self): + # Reset the state attributes (to clear self-references) + self.states = [] + self.state = None + + def emit(self, event): + self.events.append(event) + while not self.need_more_events(): + self.event = self.events.pop(0) + self.state() + self.event = None + + # In some cases, we wait for a few next events before emitting. + + def need_more_events(self): + if not self.events: + return True + event = self.events[0] + if isinstance(event, DocumentStartEvent): + return self.need_events(1) + elif isinstance(event, SequenceStartEvent): + return self.need_events(2) + elif isinstance(event, MappingStartEvent): + return self.need_events(3) + else: + return False + + def need_events(self, count): + level = 0 + for event in self.events[1:]: + if isinstance(event, (DocumentStartEvent, CollectionStartEvent)): + level += 1 + elif isinstance(event, (DocumentEndEvent, CollectionEndEvent)): + level -= 1 + elif isinstance(event, StreamEndEvent): + level = -1 + if level < 0: + return False + return (len(self.events) < count+1) + + def increase_indent(self, flow=False, indentless=False): + self.indents.append(self.indent) + if self.indent is None: + if flow: + self.indent = self.best_indent + else: + self.indent = 0 + elif not indentless: + self.indent += self.best_indent + + # States. 
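Before the individual state handlers below, it can help to see this event loop driven end to end. A minimal sketch using the package-level yaml.emit() helper, which pushes a hand-built event stream through an Emitter and returns the text when no output stream is given (again assuming the vendored PyYAML is importable as yaml):

    import yaml

    events = [
        yaml.StreamStartEvent(),
        yaml.DocumentStartEvent(explicit=True),
        yaml.MappingStartEvent(anchor=None, tag=None, implicit=True),
        yaml.ScalarEvent(None, None, (True, True), 'name'),   # key
        yaml.ScalarEvent(None, None, (True, True), 'demo'),   # value
        yaml.MappingEndEvent(),
        yaml.DocumentEndEvent(),
        yaml.StreamEndEvent(),
    ]
    print(yaml.emit(events))
    # ---
    # name: demo

An event sequence that violates the grammar quoted at the top of this file surfaces as an EmitterError from the expect_* handlers that follow.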
+ + # Stream handlers. + + def expect_stream_start(self): + if isinstance(self.event, StreamStartEvent): + if self.event.encoding and not hasattr(self.stream, 'encoding'): + self.encoding = self.event.encoding + self.write_stream_start() + self.state = self.expect_first_document_start + else: + raise EmitterError("expected StreamStartEvent, but got %s" + % self.event) + + def expect_nothing(self): + raise EmitterError("expected nothing, but got %s" % self.event) + + # Document handlers. + + def expect_first_document_start(self): + return self.expect_document_start(first=True) + + def expect_document_start(self, first=False): + if isinstance(self.event, DocumentStartEvent): + if (self.event.version or self.event.tags) and self.open_ended: + self.write_indicator('...', True) + self.write_indent() + if self.event.version: + version_text = self.prepare_version(self.event.version) + self.write_version_directive(version_text) + self.tag_prefixes = self.DEFAULT_TAG_PREFIXES.copy() + if self.event.tags: + handles = sorted(self.event.tags.keys()) + for handle in handles: + prefix = self.event.tags[handle] + self.tag_prefixes[prefix] = handle + handle_text = self.prepare_tag_handle(handle) + prefix_text = self.prepare_tag_prefix(prefix) + self.write_tag_directive(handle_text, prefix_text) + implicit = (first and not self.event.explicit and not self.canonical + and not self.event.version and not self.event.tags + and not self.check_empty_document()) + if not implicit: + self.write_indent() + self.write_indicator('---', True) + if self.canonical: + self.write_indent() + self.state = self.expect_document_root + elif isinstance(self.event, StreamEndEvent): + if self.open_ended: + self.write_indicator('...', True) + self.write_indent() + self.write_stream_end() + self.state = self.expect_nothing + else: + raise EmitterError("expected DocumentStartEvent, but got %s" + % self.event) + + def expect_document_end(self): + if isinstance(self.event, DocumentEndEvent): + self.write_indent() + if self.event.explicit: + self.write_indicator('...', True) + self.write_indent() + self.flush_stream() + self.state = self.expect_document_start + else: + raise EmitterError("expected DocumentEndEvent, but got %s" + % self.event) + + def expect_document_root(self): + self.states.append(self.expect_document_end) + self.expect_node(root=True) + + # Node handlers. 
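The document handlers above are what the dump-level keywords reach: explicit_start and explicit_end toggle the '---' and '...' indicators, and version forces a %YAML directive, which in turn makes the document start explicit. A small sketch; the expected output is shown in the comments:

    import yaml

    print(yaml.dump({'a': 1}))   # a: 1
    print(yaml.dump({'a': 1}, explicit_start=True, explicit_end=True))
    # ---
    # a: 1
    # ...
    print(yaml.dump({'a': 1}, version=(1, 1)))
    # %YAML 1.1
    # ---
    # a: 1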
+ + def expect_node(self, root=False, sequence=False, mapping=False, + simple_key=False): + self.root_context = root + self.sequence_context = sequence + self.mapping_context = mapping + self.simple_key_context = simple_key + if isinstance(self.event, AliasEvent): + self.expect_alias() + elif isinstance(self.event, (ScalarEvent, CollectionStartEvent)): + self.process_anchor('&') + self.process_tag() + if isinstance(self.event, ScalarEvent): + self.expect_scalar() + elif isinstance(self.event, SequenceStartEvent): + if self.flow_level or self.canonical or self.event.flow_style \ + or self.check_empty_sequence(): + self.expect_flow_sequence() + else: + self.expect_block_sequence() + elif isinstance(self.event, MappingStartEvent): + if self.flow_level or self.canonical or self.event.flow_style \ + or self.check_empty_mapping(): + self.expect_flow_mapping() + else: + self.expect_block_mapping() + else: + raise EmitterError("expected NodeEvent, but got %s" % self.event) + + def expect_alias(self): + if self.event.anchor is None: + raise EmitterError("anchor is not specified for alias") + self.process_anchor('*') + self.state = self.states.pop() + + def expect_scalar(self): + self.increase_indent(flow=True) + self.process_scalar() + self.indent = self.indents.pop() + self.state = self.states.pop() + + # Flow sequence handlers. + + def expect_flow_sequence(self): + self.write_indicator('[', True, whitespace=True) + self.flow_level += 1 + self.increase_indent(flow=True) + self.state = self.expect_first_flow_sequence_item + + def expect_first_flow_sequence_item(self): + if isinstance(self.event, SequenceEndEvent): + self.indent = self.indents.pop() + self.flow_level -= 1 + self.write_indicator(']', False) + self.state = self.states.pop() + else: + if self.canonical or self.column > self.best_width: + self.write_indent() + self.states.append(self.expect_flow_sequence_item) + self.expect_node(sequence=True) + + def expect_flow_sequence_item(self): + if isinstance(self.event, SequenceEndEvent): + self.indent = self.indents.pop() + self.flow_level -= 1 + if self.canonical: + self.write_indicator(',', False) + self.write_indent() + self.write_indicator(']', False) + self.state = self.states.pop() + else: + self.write_indicator(',', False) + if self.canonical or self.column > self.best_width: + self.write_indent() + self.states.append(self.expect_flow_sequence_item) + self.expect_node(sequence=True) + + # Flow mapping handlers. 
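expect_node() above routes a collection to the flow handlers whenever the event carries flow_style, the emitter is already inside a flow collection, or canonical output was requested; otherwise it falls through to the block handlers. From the public API the same data can be pushed down either path:

    import yaml

    data = {'servers': ['alpha', 'beta']}
    print(yaml.dump(data, default_flow_style=False))
    # servers:
    # - alpha
    # - beta
    print(yaml.dump(data, default_flow_style=True))
    # {servers: [alpha, beta]}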
+ + def expect_flow_mapping(self): + self.write_indicator('{', True, whitespace=True) + self.flow_level += 1 + self.increase_indent(flow=True) + self.state = self.expect_first_flow_mapping_key + + def expect_first_flow_mapping_key(self): + if isinstance(self.event, MappingEndEvent): + self.indent = self.indents.pop() + self.flow_level -= 1 + self.write_indicator('}', False) + self.state = self.states.pop() + else: + if self.canonical or self.column > self.best_width: + self.write_indent() + if not self.canonical and self.check_simple_key(): + self.states.append(self.expect_flow_mapping_simple_value) + self.expect_node(mapping=True, simple_key=True) + else: + self.write_indicator('?', True) + self.states.append(self.expect_flow_mapping_value) + self.expect_node(mapping=True) + + def expect_flow_mapping_key(self): + if isinstance(self.event, MappingEndEvent): + self.indent = self.indents.pop() + self.flow_level -= 1 + if self.canonical: + self.write_indicator(',', False) + self.write_indent() + self.write_indicator('}', False) + self.state = self.states.pop() + else: + self.write_indicator(',', False) + if self.canonical or self.column > self.best_width: + self.write_indent() + if not self.canonical and self.check_simple_key(): + self.states.append(self.expect_flow_mapping_simple_value) + self.expect_node(mapping=True, simple_key=True) + else: + self.write_indicator('?', True) + self.states.append(self.expect_flow_mapping_value) + self.expect_node(mapping=True) + + def expect_flow_mapping_simple_value(self): + self.write_indicator(':', False) + self.states.append(self.expect_flow_mapping_key) + self.expect_node(mapping=True) + + def expect_flow_mapping_value(self): + if self.canonical or self.column > self.best_width: + self.write_indent() + self.write_indicator(':', True) + self.states.append(self.expect_flow_mapping_key) + self.expect_node(mapping=True) + + # Block sequence handlers. + + def expect_block_sequence(self): + indentless = (self.mapping_context and not self.indention) + self.increase_indent(flow=False, indentless=indentless) + self.state = self.expect_first_block_sequence_item + + def expect_first_block_sequence_item(self): + return self.expect_block_sequence_item(first=True) + + def expect_block_sequence_item(self, first=False): + if not first and isinstance(self.event, SequenceEndEvent): + self.indent = self.indents.pop() + self.state = self.states.pop() + else: + self.write_indent() + self.write_indicator('-', True, indention=True) + self.states.append(self.expect_block_sequence_item) + self.expect_node(sequence=True) + + # Block mapping handlers. 
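The canonical branches threaded through these handlers (trailing commas, one entry per line, explicit '?' keys and tags) all fire together when canonical output is requested:

    import yaml

    print(yaml.dump({'a': [1, 2]}, canonical=True))
    # ---
    # !!map {
    #   ? !!str "a"
    #   : !!seq [
    #     !!int "1",
    #     !!int "2",
    #   ],
    # }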
+ + def expect_block_mapping(self): + self.increase_indent(flow=False) + self.state = self.expect_first_block_mapping_key + + def expect_first_block_mapping_key(self): + return self.expect_block_mapping_key(first=True) + + def expect_block_mapping_key(self, first=False): + if not first and isinstance(self.event, MappingEndEvent): + self.indent = self.indents.pop() + self.state = self.states.pop() + else: + self.write_indent() + if self.check_simple_key(): + self.states.append(self.expect_block_mapping_simple_value) + self.expect_node(mapping=True, simple_key=True) + else: + self.write_indicator('?', True, indention=True) + self.states.append(self.expect_block_mapping_value) + self.expect_node(mapping=True) + + def expect_block_mapping_simple_value(self): + self.write_indicator(':', False) + self.states.append(self.expect_block_mapping_key) + self.expect_node(mapping=True) + + def expect_block_mapping_value(self): + self.write_indent() + self.write_indicator(':', True, indention=True) + self.states.append(self.expect_block_mapping_key) + self.expect_node(mapping=True) + + # Checkers. + + def check_empty_sequence(self): + return (isinstance(self.event, SequenceStartEvent) and self.events + and isinstance(self.events[0], SequenceEndEvent)) + + def check_empty_mapping(self): + return (isinstance(self.event, MappingStartEvent) and self.events + and isinstance(self.events[0], MappingEndEvent)) + + def check_empty_document(self): + if not isinstance(self.event, DocumentStartEvent) or not self.events: + return False + event = self.events[0] + return (isinstance(event, ScalarEvent) and event.anchor is None + and event.tag is None and event.implicit and event.value == '') + + def check_simple_key(self): + length = 0 + if isinstance(self.event, NodeEvent) and self.event.anchor is not None: + if self.prepared_anchor is None: + self.prepared_anchor = self.prepare_anchor(self.event.anchor) + length += len(self.prepared_anchor) + if isinstance(self.event, (ScalarEvent, CollectionStartEvent)) \ + and self.event.tag is not None: + if self.prepared_tag is None: + self.prepared_tag = self.prepare_tag(self.event.tag) + length += len(self.prepared_tag) + if isinstance(self.event, ScalarEvent): + if self.analysis is None: + self.analysis = self.analyze_scalar(self.event.value) + length += len(self.analysis.scalar) + return (length < 128 and (isinstance(self.event, AliasEvent) + or (isinstance(self.event, ScalarEvent) + and not self.analysis.empty and not self.analysis.multiline) + or self.check_empty_sequence() or self.check_empty_mapping())) + + # Anchor, Tag, and Scalar processors. + + def process_anchor(self, indicator): + if self.event.anchor is None: + self.prepared_anchor = None + return + if self.prepared_anchor is None: + self.prepared_anchor = self.prepare_anchor(self.event.anchor) + if self.prepared_anchor: + self.write_indicator(indicator+self.prepared_anchor, True) + self.prepared_anchor = None + + def process_tag(self): + tag = self.event.tag + if isinstance(self.event, ScalarEvent): + if self.style is None: + self.style = self.choose_scalar_style() + if ((not self.canonical or tag is None) and + ((self.style == '' and self.event.implicit[0]) + or (self.style != '' and self.event.implicit[1]))): + self.prepared_tag = None + return + if self.event.implicit[0] and tag is None: + tag = '!' 
+ self.prepared_tag = None + else: + if (not self.canonical or tag is None) and self.event.implicit: + self.prepared_tag = None + return + if tag is None: + raise EmitterError("tag is not specified") + if self.prepared_tag is None: + self.prepared_tag = self.prepare_tag(tag) + if self.prepared_tag: + self.write_indicator(self.prepared_tag, True) + self.prepared_tag = None + + def choose_scalar_style(self): + if self.analysis is None: + self.analysis = self.analyze_scalar(self.event.value) + if self.event.style == '"' or self.canonical: + return '"' + if not self.event.style and self.event.implicit[0]: + if (not (self.simple_key_context and + (self.analysis.empty or self.analysis.multiline)) + and (self.flow_level and self.analysis.allow_flow_plain + or (not self.flow_level and self.analysis.allow_block_plain))): + return '' + if self.event.style and self.event.style in '|>': + if (not self.flow_level and not self.simple_key_context + and self.analysis.allow_block): + return self.event.style + if not self.event.style or self.event.style == '\'': + if (self.analysis.allow_single_quoted and + not (self.simple_key_context and self.analysis.multiline)): + return '\'' + return '"' + + def process_scalar(self): + if self.analysis is None: + self.analysis = self.analyze_scalar(self.event.value) + if self.style is None: + self.style = self.choose_scalar_style() + split = (not self.simple_key_context) + #if self.analysis.multiline and split \ + # and (not self.style or self.style in '\'\"'): + # self.write_indent() + if self.style == '"': + self.write_double_quoted(self.analysis.scalar, split) + elif self.style == '\'': + self.write_single_quoted(self.analysis.scalar, split) + elif self.style == '>': + self.write_folded(self.analysis.scalar) + elif self.style == '|': + self.write_literal(self.analysis.scalar) + else: + self.write_plain(self.analysis.scalar, split) + self.analysis = None + self.style = None + + # Analyzers. + + def prepare_version(self, version): + major, minor = version + if major != 1: + raise EmitterError("unsupported YAML version: %d.%d" % (major, minor)) + return '%d.%d' % (major, minor) + + def prepare_tag_handle(self, handle): + if not handle: + raise EmitterError("tag handle must not be empty") + if handle[0] != '!' or handle[-1] != '!': + raise EmitterError("tag handle must start and end with '!': %r" % handle) + for ch in handle[1:-1]: + if not ('0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ + or ch in '-_'): + raise EmitterError("invalid character %r in the tag handle: %r" + % (ch, handle)) + return handle + + def prepare_tag_prefix(self, prefix): + if not prefix: + raise EmitterError("tag prefix must not be empty") + chunks = [] + start = end = 0 + if prefix[0] == '!': + end = 1 + while end < len(prefix): + ch = prefix[end] + if '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ + or ch in '-;/?!:@&=+$,_.~*\'()[]': + end += 1 + else: + if start < end: + chunks.append(prefix[start:end]) + start = end = end+1 + data = ch.encode('utf-8') + for ch in data: + chunks.append('%%%02X' % ord(ch)) + if start < end: + chunks.append(prefix[start:end]) + return ''.join(chunks) + + def prepare_tag(self, tag): + if not tag: + raise EmitterError("tag must not be empty") + if tag == '!': + return tag + handle = None + suffix = tag + prefixes = sorted(self.tag_prefixes.keys()) + for prefix in prefixes: + if tag.startswith(prefix) \ + and (prefix == '!' 
or len(prefix) < len(tag)): + handle = self.tag_prefixes[prefix] + suffix = tag[len(prefix):] + chunks = [] + start = end = 0 + while end < len(suffix): + ch = suffix[end] + if '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ + or ch in '-;/?:@&=+$,_.~*\'()[]' \ + or (ch == '!' and handle != '!'): + end += 1 + else: + if start < end: + chunks.append(suffix[start:end]) + start = end = end+1 + data = ch.encode('utf-8') + for ch in data: + chunks.append('%%%02X' % ch) + if start < end: + chunks.append(suffix[start:end]) + suffix_text = ''.join(chunks) + if handle: + return '%s%s' % (handle, suffix_text) + else: + return '!<%s>' % suffix_text + + def prepare_anchor(self, anchor): + if not anchor: + raise EmitterError("anchor must not be empty") + for ch in anchor: + if not ('0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ + or ch in '-_'): + raise EmitterError("invalid character %r in the anchor: %r" + % (ch, anchor)) + return anchor + + def analyze_scalar(self, scalar): + + # Empty scalar is a special case. + if not scalar: + return ScalarAnalysis(scalar=scalar, empty=True, multiline=False, + allow_flow_plain=False, allow_block_plain=True, + allow_single_quoted=True, allow_double_quoted=True, + allow_block=False) + + # Indicators and special characters. + block_indicators = False + flow_indicators = False + line_breaks = False + special_characters = False + + # Important whitespace combinations. + leading_space = False + leading_break = False + trailing_space = False + trailing_break = False + break_space = False + space_break = False + + # Check document indicators. + if scalar.startswith('---') or scalar.startswith('...'): + block_indicators = True + flow_indicators = True + + # First character or preceded by a whitespace. + preceded_by_whitespace = True + + # Last character or followed by a whitespace. + followed_by_whitespace = (len(scalar) == 1 or + scalar[1] in '\0 \t\r\n\x85\u2028\u2029') + + # The previous character is a space. + previous_space = False + + # The previous character is a break. + previous_break = False + + index = 0 + while index < len(scalar): + ch = scalar[index] + + # Check for indicators. + if index == 0: + # Leading indicators are special characters. + if ch in '#,[]{}&*!|>\'\"%@`': + flow_indicators = True + block_indicators = True + if ch in '?:': + flow_indicators = True + if followed_by_whitespace: + block_indicators = True + if ch == '-' and followed_by_whitespace: + flow_indicators = True + block_indicators = True + else: + # Some indicators cannot appear within a scalar as well. + if ch in ',?[]{}': + flow_indicators = True + if ch == ':': + flow_indicators = True + if followed_by_whitespace: + block_indicators = True + if ch == '#' and preceded_by_whitespace: + flow_indicators = True + block_indicators = True + + # Check for line breaks, special, and unicode characters. + if ch in '\n\x85\u2028\u2029': + line_breaks = True + if not (ch == '\n' or '\x20' <= ch <= '\x7E'): + if (ch == '\x85' or '\xA0' <= ch <= '\uD7FF' + or '\uE000' <= ch <= '\uFFFD' + or '\U00010000' <= ch < '\U0010ffff') and ch != '\uFEFF': + unicode_characters = True + if not self.allow_unicode: + special_characters = True + else: + special_characters = True + + # Detect important whitespace combinations. 
+ if ch == ' ': + if index == 0: + leading_space = True + if index == len(scalar)-1: + trailing_space = True + if previous_break: + break_space = True + previous_space = True + previous_break = False + elif ch in '\n\x85\u2028\u2029': + if index == 0: + leading_break = True + if index == len(scalar)-1: + trailing_break = True + if previous_space: + space_break = True + previous_space = False + previous_break = True + else: + previous_space = False + previous_break = False + + # Prepare for the next character. + index += 1 + preceded_by_whitespace = (ch in '\0 \t\r\n\x85\u2028\u2029') + followed_by_whitespace = (index+1 >= len(scalar) or + scalar[index+1] in '\0 \t\r\n\x85\u2028\u2029') + + # Let's decide what styles are allowed. + allow_flow_plain = True + allow_block_plain = True + allow_single_quoted = True + allow_double_quoted = True + allow_block = True + + # Leading and trailing whitespaces are bad for plain scalars. + if (leading_space or leading_break + or trailing_space or trailing_break): + allow_flow_plain = allow_block_plain = False + + # We do not permit trailing spaces for block scalars. + if trailing_space: + allow_block = False + + # Spaces at the beginning of a new line are only acceptable for block + # scalars. + if break_space: + allow_flow_plain = allow_block_plain = allow_single_quoted = False + + # Spaces followed by breaks, as well as special character are only + # allowed for double quoted scalars. + if space_break or special_characters: + allow_flow_plain = allow_block_plain = \ + allow_single_quoted = allow_block = False + + # Although the plain scalar writer supports breaks, we never emit + # multiline plain scalars. + if line_breaks: + allow_flow_plain = allow_block_plain = False + + # Flow indicators are forbidden for flow plain scalars. + if flow_indicators: + allow_flow_plain = False + + # Block indicators are forbidden for block plain scalars. + if block_indicators: + allow_block_plain = False + + return ScalarAnalysis(scalar=scalar, + empty=False, multiline=line_breaks, + allow_flow_plain=allow_flow_plain, + allow_block_plain=allow_block_plain, + allow_single_quoted=allow_single_quoted, + allow_double_quoted=allow_double_quoted, + allow_block=allow_block) + + # Writers. + + def flush_stream(self): + if hasattr(self.stream, 'flush'): + self.stream.flush() + + def write_stream_start(self): + # Write BOM if needed. 
+ if self.encoding and self.encoding.startswith('utf-16'): + self.stream.write('\uFEFF'.encode(self.encoding)) + + def write_stream_end(self): + self.flush_stream() + + def write_indicator(self, indicator, need_whitespace, + whitespace=False, indention=False): + if self.whitespace or not need_whitespace: + data = indicator + else: + data = ' '+indicator + self.whitespace = whitespace + self.indention = self.indention and indention + self.column += len(data) + self.open_ended = False + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + + def write_indent(self): + indent = self.indent or 0 + if not self.indention or self.column > indent \ + or (self.column == indent and not self.whitespace): + self.write_line_break() + if self.column < indent: + self.whitespace = True + data = ' '*(indent-self.column) + self.column = indent + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + + def write_line_break(self, data=None): + if data is None: + data = self.best_line_break + self.whitespace = True + self.indention = True + self.line += 1 + self.column = 0 + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + + def write_version_directive(self, version_text): + data = '%%YAML %s' % version_text + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + self.write_line_break() + + def write_tag_directive(self, handle_text, prefix_text): + data = '%%TAG %s %s' % (handle_text, prefix_text) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + self.write_line_break() + + # Scalar streams. + + def write_single_quoted(self, text, split=True): + self.write_indicator('\'', True) + spaces = False + breaks = False + start = end = 0 + while end <= len(text): + ch = None + if end < len(text): + ch = text[end] + if spaces: + if ch is None or ch != ' ': + if start+1 == end and self.column > self.best_width and split \ + and start != 0 and end != len(text): + self.write_indent() + else: + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + elif breaks: + if ch is None or ch not in '\n\x85\u2028\u2029': + if text[start] == '\n': + self.write_line_break() + for br in text[start:end]: + if br == '\n': + self.write_line_break() + else: + self.write_line_break(br) + self.write_indent() + start = end + else: + if ch is None or ch in ' \n\x85\u2028\u2029' or ch == '\'': + if start < end: + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + if ch == '\'': + data = '\'\'' + self.column += 2 + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + 1 + if ch is not None: + spaces = (ch == ' ') + breaks = (ch in '\n\x85\u2028\u2029') + end += 1 + self.write_indicator('\'', False) + + ESCAPE_REPLACEMENTS = { + '\0': '0', + '\x07': 'a', + '\x08': 'b', + '\x09': 't', + '\x0A': 'n', + '\x0B': 'v', + '\x0C': 'f', + '\x0D': 'r', + '\x1B': 'e', + '\"': '\"', + '\\': '\\', + '\x85': 'N', + '\xA0': '_', + '\u2028': 'L', + '\u2029': 'P', + } + + def write_double_quoted(self, text, split=True): + self.write_indicator('"', True) + start = end = 0 + while end <= len(text): + ch = None + if end < len(text): + ch = text[end] + if ch is None or ch in '"\\\x85\u2028\u2029\uFEFF' \ + or not ('\x20' <= ch <= '\x7E' + or (self.allow_unicode + and ('\xA0' <= ch <= '\uD7FF' + 
or '\uE000' <= ch <= '\uFFFD'))): + if start < end: + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + if ch is not None: + if ch in self.ESCAPE_REPLACEMENTS: + data = '\\'+self.ESCAPE_REPLACEMENTS[ch] + elif ch <= '\xFF': + data = '\\x%02X' % ord(ch) + elif ch <= '\uFFFF': + data = '\\u%04X' % ord(ch) + else: + data = '\\U%08X' % ord(ch) + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end+1 + if 0 < end < len(text)-1 and (ch == ' ' or start >= end) \ + and self.column+(end-start) > self.best_width and split: + data = text[start:end]+'\\' + if start < end: + start = end + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + self.write_indent() + self.whitespace = False + self.indention = False + if text[start] == ' ': + data = '\\' + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + end += 1 + self.write_indicator('"', False) + + def determine_block_hints(self, text): + hints = '' + if text: + if text[0] in ' \n\x85\u2028\u2029': + hints += str(self.best_indent) + if text[-1] not in '\n\x85\u2028\u2029': + hints += '-' + elif len(text) == 1 or text[-2] in '\n\x85\u2028\u2029': + hints += '+' + return hints + + def write_folded(self, text): + hints = self.determine_block_hints(text) + self.write_indicator('>'+hints, True) + if hints[-1:] == '+': + self.open_ended = True + self.write_line_break() + leading_space = True + spaces = False + breaks = True + start = end = 0 + while end <= len(text): + ch = None + if end < len(text): + ch = text[end] + if breaks: + if ch is None or ch not in '\n\x85\u2028\u2029': + if not leading_space and ch is not None and ch != ' ' \ + and text[start] == '\n': + self.write_line_break() + leading_space = (ch == ' ') + for br in text[start:end]: + if br == '\n': + self.write_line_break() + else: + self.write_line_break(br) + if ch is not None: + self.write_indent() + start = end + elif spaces: + if ch != ' ': + if start+1 == end and self.column > self.best_width: + self.write_indent() + else: + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + else: + if ch is None or ch in ' \n\x85\u2028\u2029': + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + if ch is None: + self.write_line_break() + start = end + if ch is not None: + breaks = (ch in '\n\x85\u2028\u2029') + spaces = (ch == ' ') + end += 1 + + def write_literal(self, text): + hints = self.determine_block_hints(text) + self.write_indicator('|'+hints, True) + if hints[-1:] == '+': + self.open_ended = True + self.write_line_break() + breaks = True + start = end = 0 + while end <= len(text): + ch = None + if end < len(text): + ch = text[end] + if breaks: + if ch is None or ch not in '\n\x85\u2028\u2029': + for br in text[start:end]: + if br == '\n': + self.write_line_break() + else: + self.write_line_break(br) + if ch is not None: + self.write_indent() + start = end + else: + if ch is None or ch in '\n\x85\u2028\u2029': + data = text[start:end] + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + if ch is None: + self.write_line_break() + start = end + if ch is not None: + breaks = (ch in '\n\x85\u2028\u2029') + end += 1 + 
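write_literal() and write_folded() above are reached through the '|' and '>' styles, with determine_block_hints() supplying the indentation and chomping suffix. A sketch forcing them via the dump-level default_style knob; note that the style applies to every scalar, so the key falls back to double quotes because choose_scalar_style() refuses block styles for simple keys:

    import yaml

    text = "first line\nsecond line\n"
    print(yaml.dump({'msg': text}, default_style='|'))
    # "msg": |
    #   first line
    #   second line
    print(yaml.dump({'msg': text}, default_style='>'))
    # as above, but folded: single line breaks are doubled into
    # blank lines by write_folded() so they survive re-folding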
+ def write_plain(self, text, split=True): + if self.root_context: + self.open_ended = True + if not text: + return + if not self.whitespace: + data = ' ' + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + self.whitespace = False + self.indention = False + spaces = False + breaks = False + start = end = 0 + while end <= len(text): + ch = None + if end < len(text): + ch = text[end] + if spaces: + if ch != ' ': + if start+1 == end and self.column > self.best_width and split: + self.write_indent() + self.whitespace = False + self.indention = False + else: + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + elif breaks: + if ch not in '\n\x85\u2028\u2029': + if text[start] == '\n': + self.write_line_break() + for br in text[start:end]: + if br == '\n': + self.write_line_break() + else: + self.write_line_break(br) + self.write_indent() + self.whitespace = False + self.indention = False + start = end + else: + if ch is None or ch in ' \n\x85\u2028\u2029': + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + if ch is not None: + spaces = (ch == ' ') + breaks = (ch in '\n\x85\u2028\u2029') + end += 1 diff --git a/venv/lib/python3.12/site-packages/yaml/error.py b/venv/lib/python3.12/site-packages/yaml/error.py new file mode 100644 index 00000000..b796b4dc --- /dev/null +++ b/venv/lib/python3.12/site-packages/yaml/error.py @@ -0,0 +1,75 @@ + +__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError'] + +class Mark: + + def __init__(self, name, index, line, column, buffer, pointer): + self.name = name + self.index = index + self.line = line + self.column = column + self.buffer = buffer + self.pointer = pointer + + def get_snippet(self, indent=4, max_length=75): + if self.buffer is None: + return None + head = '' + start = self.pointer + while start > 0 and self.buffer[start-1] not in '\0\r\n\x85\u2028\u2029': + start -= 1 + if self.pointer-start > max_length/2-1: + head = ' ... ' + start += 5 + break + tail = '' + end = self.pointer + while end < len(self.buffer) and self.buffer[end] not in '\0\r\n\x85\u2028\u2029': + end += 1 + if end-self.pointer > max_length/2-1: + tail = ' ... 
' + end -= 5 + break + snippet = self.buffer[start:end] + return ' '*indent + head + snippet + tail + '\n' \ + + ' '*(indent+self.pointer-start+len(head)) + '^' + + def __str__(self): + snippet = self.get_snippet() + where = " in \"%s\", line %d, column %d" \ + % (self.name, self.line+1, self.column+1) + if snippet is not None: + where += ":\n"+snippet + return where + +class YAMLError(Exception): + pass + +class MarkedYAMLError(YAMLError): + + def __init__(self, context=None, context_mark=None, + problem=None, problem_mark=None, note=None): + self.context = context + self.context_mark = context_mark + self.problem = problem + self.problem_mark = problem_mark + self.note = note + + def __str__(self): + lines = [] + if self.context is not None: + lines.append(self.context) + if self.context_mark is not None \ + and (self.problem is None or self.problem_mark is None + or self.context_mark.name != self.problem_mark.name + or self.context_mark.line != self.problem_mark.line + or self.context_mark.column != self.problem_mark.column): + lines.append(str(self.context_mark)) + if self.problem is not None: + lines.append(self.problem) + if self.problem_mark is not None: + lines.append(str(self.problem_mark)) + if self.note is not None: + lines.append(self.note) + return '\n'.join(lines) + diff --git a/venv/lib/python3.12/site-packages/yaml/events.py b/venv/lib/python3.12/site-packages/yaml/events.py new file mode 100644 index 00000000..f79ad389 --- /dev/null +++ b/venv/lib/python3.12/site-packages/yaml/events.py @@ -0,0 +1,86 @@ + +# Abstract classes. + +class Event(object): + def __init__(self, start_mark=None, end_mark=None): + self.start_mark = start_mark + self.end_mark = end_mark + def __repr__(self): + attributes = [key for key in ['anchor', 'tag', 'implicit', 'value'] + if hasattr(self, key)] + arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) + for key in attributes]) + return '%s(%s)' % (self.__class__.__name__, arguments) + +class NodeEvent(Event): + def __init__(self, anchor, start_mark=None, end_mark=None): + self.anchor = anchor + self.start_mark = start_mark + self.end_mark = end_mark + +class CollectionStartEvent(NodeEvent): + def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None, + flow_style=None): + self.anchor = anchor + self.tag = tag + self.implicit = implicit + self.start_mark = start_mark + self.end_mark = end_mark + self.flow_style = flow_style + +class CollectionEndEvent(Event): + pass + +# Implementations. 
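The concrete event classes that follow are exactly what the package-level yaml.parse() yields, which makes it a convenient way to inspect the event stream the emitter grammar above consumes:

    import yaml

    for event in yaml.parse("- a\n- b\n"):
        print(event)
    # StreamStartEvent()
    # DocumentStartEvent()
    # SequenceStartEvent(anchor=None, tag=None, implicit=True)
    # ScalarEvent(anchor=None, tag=None, implicit=(True, False), value='a')
    # ScalarEvent(anchor=None, tag=None, implicit=(True, False), value='b')
    # SequenceEndEvent()
    # DocumentEndEvent()
    # StreamEndEvent()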
+ +class StreamStartEvent(Event): + def __init__(self, start_mark=None, end_mark=None, encoding=None): + self.start_mark = start_mark + self.end_mark = end_mark + self.encoding = encoding + +class StreamEndEvent(Event): + pass + +class DocumentStartEvent(Event): + def __init__(self, start_mark=None, end_mark=None, + explicit=None, version=None, tags=None): + self.start_mark = start_mark + self.end_mark = end_mark + self.explicit = explicit + self.version = version + self.tags = tags + +class DocumentEndEvent(Event): + def __init__(self, start_mark=None, end_mark=None, + explicit=None): + self.start_mark = start_mark + self.end_mark = end_mark + self.explicit = explicit + +class AliasEvent(NodeEvent): + pass + +class ScalarEvent(NodeEvent): + def __init__(self, anchor, tag, implicit, value, + start_mark=None, end_mark=None, style=None): + self.anchor = anchor + self.tag = tag + self.implicit = implicit + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + self.style = style + +class SequenceStartEvent(CollectionStartEvent): + pass + +class SequenceEndEvent(CollectionEndEvent): + pass + +class MappingStartEvent(CollectionStartEvent): + pass + +class MappingEndEvent(CollectionEndEvent): + pass + diff --git a/venv/lib/python3.12/site-packages/yaml/loader.py b/venv/lib/python3.12/site-packages/yaml/loader.py new file mode 100644 index 00000000..e90c1122 --- /dev/null +++ b/venv/lib/python3.12/site-packages/yaml/loader.py @@ -0,0 +1,63 @@ + +__all__ = ['BaseLoader', 'FullLoader', 'SafeLoader', 'Loader', 'UnsafeLoader'] + +from .reader import * +from .scanner import * +from .parser import * +from .composer import * +from .constructor import * +from .resolver import * + +class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver): + + def __init__(self, stream): + Reader.__init__(self, stream) + Scanner.__init__(self) + Parser.__init__(self) + Composer.__init__(self) + BaseConstructor.__init__(self) + BaseResolver.__init__(self) + +class FullLoader(Reader, Scanner, Parser, Composer, FullConstructor, Resolver): + + def __init__(self, stream): + Reader.__init__(self, stream) + Scanner.__init__(self) + Parser.__init__(self) + Composer.__init__(self) + FullConstructor.__init__(self) + Resolver.__init__(self) + +class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver): + + def __init__(self, stream): + Reader.__init__(self, stream) + Scanner.__init__(self) + Parser.__init__(self) + Composer.__init__(self) + SafeConstructor.__init__(self) + Resolver.__init__(self) + +class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver): + + def __init__(self, stream): + Reader.__init__(self, stream) + Scanner.__init__(self) + Parser.__init__(self) + Composer.__init__(self) + Constructor.__init__(self) + Resolver.__init__(self) + +# UnsafeLoader is the same as Loader (which is and was always unsafe on +# untrusted input). Use of either Loader or UnsafeLoader should be rare, since +# FullLoad should be able to load almost all YAML safely. Loader is left intact +# to ensure backwards compatibility. 
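In terms of these classes, the package-level helpers map one to one: safe_load() uses SafeLoader, full_load() uses FullLoader, and unsafe_load() uses UnsafeLoader. A minimal sketch of the behavioural difference:

    import yaml

    print(yaml.safe_load("port: 8080"))   # plain data works everywhere: {'port': 8080}
    try:
        yaml.safe_load("!!python/object/apply:os.getcwd []")
    except yaml.constructor.ConstructorError as exc:
        print(exc.problem)   # could not determine a constructor for the tag ...
    # yaml.unsafe_load() on the same document would import os and call getcwd().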
+class UnsafeLoader(Reader, Scanner, Parser, Composer, Constructor, Resolver):
+
+    def __init__(self, stream):
+        Reader.__init__(self, stream)
+        Scanner.__init__(self)
+        Parser.__init__(self)
+        Composer.__init__(self)
+        Constructor.__init__(self)
+        Resolver.__init__(self)
diff --git a/venv/lib/python3.12/site-packages/yaml/nodes.py b/venv/lib/python3.12/site-packages/yaml/nodes.py
new file mode 100644
index 00000000..c4f070c4
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/yaml/nodes.py
@@ -0,0 +1,49 @@
+
+class Node(object):
+    def __init__(self, tag, value, start_mark, end_mark):
+        self.tag = tag
+        self.value = value
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+    def __repr__(self):
+        value = self.value
+        #if isinstance(value, list):
+        #    if len(value) == 0:
+        #        value = '<empty>'
+        #    elif len(value) == 1:
+        #        value = '<1 item>'
+        #    else:
+        #        value = '<%d items>' % len(value)
+        #else:
+        #    if len(value) > 75:
+        #        value = repr(value[:70]+u' ... ')
+        #    else:
+        #        value = repr(value)
+        value = repr(value)
+        return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value)
+
+class ScalarNode(Node):
+    id = 'scalar'
+    def __init__(self, tag, value,
+            start_mark=None, end_mark=None, style=None):
+        self.tag = tag
+        self.value = value
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+        self.style = style
+
+class CollectionNode(Node):
+    def __init__(self, tag, value,
+            start_mark=None, end_mark=None, flow_style=None):
+        self.tag = tag
+        self.value = value
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+        self.flow_style = flow_style
+
+class SequenceNode(CollectionNode):
+    id = 'sequence'
+
+class MappingNode(CollectionNode):
+    id = 'mapping'
+
diff --git a/venv/lib/python3.12/site-packages/yaml/parser.py b/venv/lib/python3.12/site-packages/yaml/parser.py
new file mode 100644
index 00000000..13a5995d
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/yaml/parser.py
@@ -0,0 +1,589 @@
+
+# The following YAML grammar is LL(1) and is parsed by a recursive descent
+# parser.
+#
+# stream            ::= STREAM-START implicit_document? explicit_document* STREAM-END
+# implicit_document ::= block_node DOCUMENT-END*
+# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
+# block_node_or_indentless_sequence ::=
+#                       ALIAS
+#                       | properties (block_content | indentless_block_sequence)?
+#                       | block_content
+#                       | indentless_block_sequence
+# block_node        ::= ALIAS
+#                       | properties block_content?
+#                       | block_content
+# flow_node         ::= ALIAS
+#                       | properties flow_content?
+#                       | flow_content
+# properties        ::= TAG ANCHOR? | ANCHOR TAG?
+# block_content     ::= block_collection | flow_collection | SCALAR
+# flow_content      ::= flow_collection | SCALAR
+# block_collection  ::= block_sequence | block_mapping
+# flow_collection   ::= flow_sequence | flow_mapping
+# block_sequence    ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
+# indentless_sequence   ::= (BLOCK-ENTRY block_node?)+
+# block_mapping     ::= BLOCK-MAPPING_START
+#                       ((KEY block_node_or_indentless_sequence?)?
+#                       (VALUE block_node_or_indentless_sequence?)?)*
+#                       BLOCK-END
+# flow_sequence     ::= FLOW-SEQUENCE-START
+#                       (flow_sequence_entry FLOW-ENTRY)*
+#                       flow_sequence_entry?
+#                       FLOW-SEQUENCE-END
+# flow_sequence_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+# flow_mapping      ::= FLOW-MAPPING-START
+#                       (flow_mapping_entry FLOW-ENTRY)*
+#                       flow_mapping_entry?
+#                       FLOW-MAPPING-END
+# flow_mapping_entry    ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+# +# FIRST sets: +# +# stream: { STREAM-START } +# explicit_document: { DIRECTIVE DOCUMENT-START } +# implicit_document: FIRST(block_node) +# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START } +# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START } +# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR } +# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR } +# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START } +# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START } +# block_sequence: { BLOCK-SEQUENCE-START } +# block_mapping: { BLOCK-MAPPING-START } +# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY } +# indentless_sequence: { ENTRY } +# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START } +# flow_sequence: { FLOW-SEQUENCE-START } +# flow_mapping: { FLOW-MAPPING-START } +# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY } +# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY } + +__all__ = ['Parser', 'ParserError'] + +from .error import MarkedYAMLError +from .tokens import * +from .events import * +from .scanner import * + +class ParserError(MarkedYAMLError): + pass + +class Parser: + # Since writing a recursive-descendant parser is a straightforward task, we + # do not give many comments here. + + DEFAULT_TAGS = { + '!': '!', + '!!': 'tag:yaml.org,2002:', + } + + def __init__(self): + self.current_event = None + self.yaml_version = None + self.tag_handles = {} + self.states = [] + self.marks = [] + self.state = self.parse_stream_start + + def dispose(self): + # Reset the state attributes (to clear self-references) + self.states = [] + self.state = None + + def check_event(self, *choices): + # Check the type of the next event. + if self.current_event is None: + if self.state: + self.current_event = self.state() + if self.current_event is not None: + if not choices: + return True + for choice in choices: + if isinstance(self.current_event, choice): + return True + return False + + def peek_event(self): + # Get the next event. + if self.current_event is None: + if self.state: + self.current_event = self.state() + return self.current_event + + def get_event(self): + # Get the next event and proceed further. + if self.current_event is None: + if self.state: + self.current_event = self.state() + value = self.current_event + self.current_event = None + return value + + # stream ::= STREAM-START implicit_document? explicit_document* STREAM-END + # implicit_document ::= block_node DOCUMENT-END* + # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* + + def parse_stream_start(self): + + # Parse the stream start. + token = self.get_token() + event = StreamStartEvent(token.start_mark, token.end_mark, + encoding=token.encoding) + + # Prepare the next state. + self.state = self.parse_implicit_document_start + + return event + + def parse_implicit_document_start(self): + + # Parse an implicit document. + if not self.check_token(DirectiveToken, DocumentStartToken, + StreamEndToken): + self.tag_handles = self.DEFAULT_TAGS + token = self.peek_token() + start_mark = end_mark = token.start_mark + event = DocumentStartEvent(start_mark, end_mark, + explicit=False) + + # Prepare the next state. 
+            self.states.append(self.parse_document_end)
+            self.state = self.parse_block_node
+
+            return event
+
+        else:
+            return self.parse_document_start()
+
+    def parse_document_start(self):
+
+        # Parse any extra document end indicators.
+        while self.check_token(DocumentEndToken):
+            self.get_token()
+
+        # Parse an explicit document.
+        if not self.check_token(StreamEndToken):
+            token = self.peek_token()
+            start_mark = token.start_mark
+            version, tags = self.process_directives()
+            if not self.check_token(DocumentStartToken):
+                raise ParserError(None, None,
+                        "expected '<document start>', but found %r"
+                        % self.peek_token().id,
+                        self.peek_token().start_mark)
+            token = self.get_token()
+            end_mark = token.end_mark
+            event = DocumentStartEvent(start_mark, end_mark,
+                    explicit=True, version=version, tags=tags)
+            self.states.append(self.parse_document_end)
+            self.state = self.parse_document_content
+        else:
+            # Parse the end of the stream.
+            token = self.get_token()
+            event = StreamEndEvent(token.start_mark, token.end_mark)
+            assert not self.states
+            assert not self.marks
+            self.state = None
+        return event
+
+    def parse_document_end(self):
+
+        # Parse the document end.
+        token = self.peek_token()
+        start_mark = end_mark = token.start_mark
+        explicit = False
+        if self.check_token(DocumentEndToken):
+            token = self.get_token()
+            end_mark = token.end_mark
+            explicit = True
+        event = DocumentEndEvent(start_mark, end_mark,
+                explicit=explicit)
+
+        # Prepare the next state.
+        self.state = self.parse_document_start
+
+        return event
+
+    def parse_document_content(self):
+        if self.check_token(DirectiveToken,
+                DocumentStartToken, DocumentEndToken, StreamEndToken):
+            event = self.process_empty_scalar(self.peek_token().start_mark)
+            self.state = self.states.pop()
+            return event
+        else:
+            return self.parse_block_node()
+
+    def process_directives(self):
+        self.yaml_version = None
+        self.tag_handles = {}
+        while self.check_token(DirectiveToken):
+            token = self.get_token()
+            if token.name == 'YAML':
+                if self.yaml_version is not None:
+                    raise ParserError(None, None,
+                            "found duplicate YAML directive", token.start_mark)
+                major, minor = token.value
+                if major != 1:
+                    raise ParserError(None, None,
+                            "found incompatible YAML document (version 1.* is required)",
+                            token.start_mark)
+                self.yaml_version = token.value
+            elif token.name == 'TAG':
+                handle, prefix = token.value
+                if handle in self.tag_handles:
+                    raise ParserError(None, None,
+                            "duplicate tag handle %r" % handle,
+                            token.start_mark)
+                self.tag_handles[handle] = prefix
+        if self.tag_handles:
+            value = self.yaml_version, self.tag_handles.copy()
+        else:
+            value = self.yaml_version, None
+        for key in self.DEFAULT_TAGS:
+            if key not in self.tag_handles:
+                self.tag_handles[key] = self.DEFAULT_TAGS[key]
+        return value
+
+    # block_node_or_indentless_sequence ::= ALIAS
+    #               | properties (block_content | indentless_block_sequence)?
+    #               | block_content
+    #               | indentless_block_sequence
+    # block_node    ::= ALIAS
+    #                   | properties block_content?
+    #                   | block_content
+    # flow_node     ::= ALIAS
+    #                   | properties flow_content?
+    #                   | flow_content
+    # properties    ::= TAG ANCHOR? | ANCHOR TAG?
+ # block_content ::= block_collection | flow_collection | SCALAR + # flow_content ::= flow_collection | SCALAR + # block_collection ::= block_sequence | block_mapping + # flow_collection ::= flow_sequence | flow_mapping + + def parse_block_node(self): + return self.parse_node(block=True) + + def parse_flow_node(self): + return self.parse_node() + + def parse_block_node_or_indentless_sequence(self): + return self.parse_node(block=True, indentless_sequence=True) + + def parse_node(self, block=False, indentless_sequence=False): + if self.check_token(AliasToken): + token = self.get_token() + event = AliasEvent(token.value, token.start_mark, token.end_mark) + self.state = self.states.pop() + else: + anchor = None + tag = None + start_mark = end_mark = tag_mark = None + if self.check_token(AnchorToken): + token = self.get_token() + start_mark = token.start_mark + end_mark = token.end_mark + anchor = token.value + if self.check_token(TagToken): + token = self.get_token() + tag_mark = token.start_mark + end_mark = token.end_mark + tag = token.value + elif self.check_token(TagToken): + token = self.get_token() + start_mark = tag_mark = token.start_mark + end_mark = token.end_mark + tag = token.value + if self.check_token(AnchorToken): + token = self.get_token() + end_mark = token.end_mark + anchor = token.value + if tag is not None: + handle, suffix = tag + if handle is not None: + if handle not in self.tag_handles: + raise ParserError("while parsing a node", start_mark, + "found undefined tag handle %r" % handle, + tag_mark) + tag = self.tag_handles[handle]+suffix + else: + tag = suffix + #if tag == '!': + # raise ParserError("while parsing a node", start_mark, + # "found non-specific tag '!'", tag_mark, + # "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.") + if start_mark is None: + start_mark = end_mark = self.peek_token().start_mark + event = None + implicit = (tag is None or tag == '!') + if indentless_sequence and self.check_token(BlockEntryToken): + end_mark = self.peek_token().end_mark + event = SequenceStartEvent(anchor, tag, implicit, + start_mark, end_mark) + self.state = self.parse_indentless_sequence_entry + else: + if self.check_token(ScalarToken): + token = self.get_token() + end_mark = token.end_mark + if (token.plain and tag is None) or tag == '!': + implicit = (True, False) + elif tag is None: + implicit = (False, True) + else: + implicit = (False, False) + event = ScalarEvent(anchor, tag, implicit, token.value, + start_mark, end_mark, style=token.style) + self.state = self.states.pop() + elif self.check_token(FlowSequenceStartToken): + end_mark = self.peek_token().end_mark + event = SequenceStartEvent(anchor, tag, implicit, + start_mark, end_mark, flow_style=True) + self.state = self.parse_flow_sequence_first_entry + elif self.check_token(FlowMappingStartToken): + end_mark = self.peek_token().end_mark + event = MappingStartEvent(anchor, tag, implicit, + start_mark, end_mark, flow_style=True) + self.state = self.parse_flow_mapping_first_key + elif block and self.check_token(BlockSequenceStartToken): + end_mark = self.peek_token().start_mark + event = SequenceStartEvent(anchor, tag, implicit, + start_mark, end_mark, flow_style=False) + self.state = self.parse_block_sequence_first_entry + elif block and self.check_token(BlockMappingStartToken): + end_mark = self.peek_token().start_mark + event = MappingStartEvent(anchor, tag, implicit, + start_mark, end_mark, flow_style=False) + self.state = self.parse_block_mapping_first_key + elif anchor is not 
None or tag is not None:
+            # Empty scalars are allowed even if a tag or an anchor is
+            # specified.
+            event = ScalarEvent(anchor, tag, (implicit, False), '',
+                    start_mark, end_mark)
+            self.state = self.states.pop()
+        else:
+            if block:
+                node = 'block'
+            else:
+                node = 'flow'
+            token = self.peek_token()
+            raise ParserError("while parsing a %s node" % node, start_mark,
+                    "expected the node content, but found %r" % token.id,
+                    token.start_mark)
+        return event
+
+    # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
+
+    def parse_block_sequence_first_entry(self):
+        token = self.get_token()
+        self.marks.append(token.start_mark)
+        return self.parse_block_sequence_entry()
+
+    def parse_block_sequence_entry(self):
+        if self.check_token(BlockEntryToken):
+            token = self.get_token()
+            if not self.check_token(BlockEntryToken, BlockEndToken):
+                self.states.append(self.parse_block_sequence_entry)
+                return self.parse_block_node()
+            else:
+                self.state = self.parse_block_sequence_entry
+                return self.process_empty_scalar(token.end_mark)
+        if not self.check_token(BlockEndToken):
+            token = self.peek_token()
+            raise ParserError("while parsing a block collection", self.marks[-1],
+                    "expected <block end>, but found %r" % token.id, token.start_mark)
+        token = self.get_token()
+        event = SequenceEndEvent(token.start_mark, token.end_mark)
+        self.state = self.states.pop()
+        self.marks.pop()
+        return event
+
+    # indentless_sequence ::= (BLOCK-ENTRY block_node?)+
+
+    def parse_indentless_sequence_entry(self):
+        if self.check_token(BlockEntryToken):
+            token = self.get_token()
+            if not self.check_token(BlockEntryToken,
+                    KeyToken, ValueToken, BlockEndToken):
+                self.states.append(self.parse_indentless_sequence_entry)
+                return self.parse_block_node()
+            else:
+                self.state = self.parse_indentless_sequence_entry
+                return self.process_empty_scalar(token.end_mark)
+        token = self.peek_token()
+        event = SequenceEndEvent(token.start_mark, token.start_mark)
+        self.state = self.states.pop()
+        return event
+
+    # block_mapping     ::= BLOCK-MAPPING_START
+    #                       ((KEY block_node_or_indentless_sequence?)?
+    #                       (VALUE block_node_or_indentless_sequence?)?)*
+    #                       BLOCK-END
+
+    def parse_block_mapping_first_key(self):
+        token = self.get_token()
+        self.marks.append(token.start_mark)
+        return self.parse_block_mapping_key()
+
+    def parse_block_mapping_key(self):
+        if self.check_token(KeyToken):
+            token = self.get_token()
+            if not self.check_token(KeyToken, ValueToken, BlockEndToken):
+                self.states.append(self.parse_block_mapping_value)
+                return self.parse_block_node_or_indentless_sequence()
+            else:
+                self.state = self.parse_block_mapping_value
+                return self.process_empty_scalar(token.end_mark)
+        if not self.check_token(BlockEndToken):
+            token = self.peek_token()
+            raise ParserError("while parsing a block mapping", self.marks[-1],
+                    "expected <block end>, but found %r" % token.id, token.start_mark)
+        token = self.get_token()
+        event = MappingEndEvent(token.start_mark, token.end_mark)
+        self.state = self.states.pop()
+        self.marks.pop()
+        return event
+
+    def parse_block_mapping_value(self):
+        if self.check_token(ValueToken):
+            token = self.get_token()
+            if not self.check_token(KeyToken, ValueToken, BlockEndToken):
+                self.states.append(self.parse_block_mapping_key)
+                return self.parse_block_node_or_indentless_sequence()
+            else:
+                self.state = self.parse_block_mapping_key
+                return self.process_empty_scalar(token.end_mark)
+        else:
+            self.state = self.parse_block_mapping_key
+            token = self.peek_token()
+            return self.process_empty_scalar(token.start_mark)
+
+    # flow_sequence     ::= FLOW-SEQUENCE-START
+    #                       (flow_sequence_entry FLOW-ENTRY)*
+    #                       flow_sequence_entry?
+    #                       FLOW-SEQUENCE-END
+    # flow_sequence_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+    #
+    # Note that while production rules for both flow_sequence_entry and
+    # flow_mapping_entry are equal, their interpretations are different.
+    # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
+    # generate an inline mapping (set syntax).
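Everything above is a pushdown automaton: each parse_* method emits exactly one event and parks its continuation in self.state/self.states. A minimal sketch of how those states surface through PyYAML's public event API, assuming this vendored package is importable as `yaml`; the %TAG handle and the !e!point tag are made-up illustration values:

import yaml

document = """\
%TAG !e! tag:example.com,2000:
--- !e!point
x: 1
y: [2, 3]
"""

# yaml.parse() yields one event per parser state transition.
for event in yaml.parse(document):
    print(type(event).__name__, getattr(event, 'value', ''))

# Expected shape of the stream: StreamStartEvent, an explicit
# DocumentStartEvent carrying the %TAG directive in event.tags,
# MappingStartEvent with the tag resolved by process_directives/
# parse_node to 'tag:example.com,2000:point', ScalarEvents for the
# keys and values, SequenceStart/End for the flow list, then the
# matching end events.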
+ + def parse_flow_sequence_first_entry(self): + token = self.get_token() + self.marks.append(token.start_mark) + return self.parse_flow_sequence_entry(first=True) + + def parse_flow_sequence_entry(self, first=False): + if not self.check_token(FlowSequenceEndToken): + if not first: + if self.check_token(FlowEntryToken): + self.get_token() + else: + token = self.peek_token() + raise ParserError("while parsing a flow sequence", self.marks[-1], + "expected ',' or ']', but got %r" % token.id, token.start_mark) + + if self.check_token(KeyToken): + token = self.peek_token() + event = MappingStartEvent(None, None, True, + token.start_mark, token.end_mark, + flow_style=True) + self.state = self.parse_flow_sequence_entry_mapping_key + return event + elif not self.check_token(FlowSequenceEndToken): + self.states.append(self.parse_flow_sequence_entry) + return self.parse_flow_node() + token = self.get_token() + event = SequenceEndEvent(token.start_mark, token.end_mark) + self.state = self.states.pop() + self.marks.pop() + return event + + def parse_flow_sequence_entry_mapping_key(self): + token = self.get_token() + if not self.check_token(ValueToken, + FlowEntryToken, FlowSequenceEndToken): + self.states.append(self.parse_flow_sequence_entry_mapping_value) + return self.parse_flow_node() + else: + self.state = self.parse_flow_sequence_entry_mapping_value + return self.process_empty_scalar(token.end_mark) + + def parse_flow_sequence_entry_mapping_value(self): + if self.check_token(ValueToken): + token = self.get_token() + if not self.check_token(FlowEntryToken, FlowSequenceEndToken): + self.states.append(self.parse_flow_sequence_entry_mapping_end) + return self.parse_flow_node() + else: + self.state = self.parse_flow_sequence_entry_mapping_end + return self.process_empty_scalar(token.end_mark) + else: + self.state = self.parse_flow_sequence_entry_mapping_end + token = self.peek_token() + return self.process_empty_scalar(token.start_mark) + + def parse_flow_sequence_entry_mapping_end(self): + self.state = self.parse_flow_sequence_entry + token = self.peek_token() + return MappingEndEvent(token.start_mark, token.start_mark) + + # flow_mapping ::= FLOW-MAPPING-START + # (flow_mapping_entry FLOW-ENTRY)* + # flow_mapping_entry? + # FLOW-MAPPING-END + # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
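The note above about `KEY flow_node? (VALUE flow_node?)?` inside a flow sequence is easiest to see from the loader side; a small sketch, assuming the standard safe loader:

import yaml

# A key/value pair written directly inside a flow sequence becomes an
# inline single-pair mapping, exactly as parse_flow_sequence_entry
# emits a MappingStartEvent when it peeks a KeyToken.
print(yaml.safe_load('[a: 1, b]'))     # [{'a': 1}, 'b']
print(yaml.safe_load('[a: 1, b: 2]'))  # [{'a': 1}, {'b': 2}]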
+ + def parse_flow_mapping_first_key(self): + token = self.get_token() + self.marks.append(token.start_mark) + return self.parse_flow_mapping_key(first=True) + + def parse_flow_mapping_key(self, first=False): + if not self.check_token(FlowMappingEndToken): + if not first: + if self.check_token(FlowEntryToken): + self.get_token() + else: + token = self.peek_token() + raise ParserError("while parsing a flow mapping", self.marks[-1], + "expected ',' or '}', but got %r" % token.id, token.start_mark) + if self.check_token(KeyToken): + token = self.get_token() + if not self.check_token(ValueToken, + FlowEntryToken, FlowMappingEndToken): + self.states.append(self.parse_flow_mapping_value) + return self.parse_flow_node() + else: + self.state = self.parse_flow_mapping_value + return self.process_empty_scalar(token.end_mark) + elif not self.check_token(FlowMappingEndToken): + self.states.append(self.parse_flow_mapping_empty_value) + return self.parse_flow_node() + token = self.get_token() + event = MappingEndEvent(token.start_mark, token.end_mark) + self.state = self.states.pop() + self.marks.pop() + return event + + def parse_flow_mapping_value(self): + if self.check_token(ValueToken): + token = self.get_token() + if not self.check_token(FlowEntryToken, FlowMappingEndToken): + self.states.append(self.parse_flow_mapping_key) + return self.parse_flow_node() + else: + self.state = self.parse_flow_mapping_key + return self.process_empty_scalar(token.end_mark) + else: + self.state = self.parse_flow_mapping_key + token = self.peek_token() + return self.process_empty_scalar(token.start_mark) + + def parse_flow_mapping_empty_value(self): + self.state = self.parse_flow_mapping_key + return self.process_empty_scalar(self.peek_token().start_mark) + + def process_empty_scalar(self, mark): + return ScalarEvent(None, None, (True, False), '', mark, mark) + diff --git a/venv/lib/python3.12/site-packages/yaml/reader.py b/venv/lib/python3.12/site-packages/yaml/reader.py new file mode 100644 index 00000000..774b0219 --- /dev/null +++ b/venv/lib/python3.12/site-packages/yaml/reader.py @@ -0,0 +1,185 @@ +# This module contains abstractions for the input stream. You don't have to +# looks further, there are no pretty code. +# +# We define two classes here. +# +# Mark(source, line, column) +# It's just a record and its only use is producing nice error messages. +# Parser does not use it for any other purposes. +# +# Reader(source, data) +# Reader determines the encoding of `data` and converts it to unicode. +# Reader provides the following methods and attributes: +# reader.peek(length=1) - return the next `length` characters +# reader.forward(length=1) - move the current position to `length` characters. +# reader.index - the number of the current character. +# reader.line, stream.column - the line and the column of the current character. 
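The header comment above is the whole Reader contract: a peek/forward cursor plus index/line/column bookkeeping. A short sketch driving the class below directly, assuming the vendored package is on sys.path:

from yaml.reader import Reader

r = Reader("abc\ndef")      # a str stream needs no decoding
print(r.peek())             # 'a' -- peek never moves the cursor
print(r.prefix(3))          # 'abc'
r.forward(4)                # consume 'abc' and the line break
print(r.line, r.column)     # 1 0 -- forward() tracked the newline
print(r.index, r.peek())    # 4 'd'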
+
+__all__ = ['Reader', 'ReaderError']
+
+from .error import YAMLError, Mark
+
+import codecs, re
+
+class ReaderError(YAMLError):
+
+    def __init__(self, name, position, character, encoding, reason):
+        self.name = name
+        self.character = character
+        self.position = position
+        self.encoding = encoding
+        self.reason = reason
+
+    def __str__(self):
+        if isinstance(self.character, bytes):
+            return "'%s' codec can't decode byte #x%02x: %s\n" \
+                    "  in \"%s\", position %d" \
+                    % (self.encoding, ord(self.character), self.reason,
+                            self.name, self.position)
+        else:
+            return "unacceptable character #x%04x: %s\n" \
+                    "  in \"%s\", position %d" \
+                    % (self.character, self.reason,
+                            self.name, self.position)
+
+class Reader(object):
+    # Reader:
+    # - determines the data encoding and converts it to a unicode string,
+    # - checks if characters are in allowed range,
+    # - adds '\0' to the end.
+
+    # Reader accepts
+    #  - a `bytes` object,
+    #  - a `str` object,
+    #  - a file-like object with its `read` method returning `str`,
+    #  - a file-like object with its `read` method returning `unicode`.
+
+    # Yeah, it's ugly and slow.
+
+    def __init__(self, stream):
+        self.name = None
+        self.stream = None
+        self.stream_pointer = 0
+        self.eof = True
+        self.buffer = ''
+        self.pointer = 0
+        self.raw_buffer = None
+        self.raw_decode = None
+        self.encoding = None
+        self.index = 0
+        self.line = 0
+        self.column = 0
+        if isinstance(stream, str):
+            self.name = "<unicode string>"
+            self.check_printable(stream)
+            self.buffer = stream+'\0'
+        elif isinstance(stream, bytes):
+            self.name = "<byte string>"
+            self.raw_buffer = stream
+            self.determine_encoding()
+        else:
+            self.stream = stream
+            self.name = getattr(stream, 'name', "<file>")
+            self.eof = False
+            self.raw_buffer = None
+            self.determine_encoding()
+
+    def peek(self, index=0):
+        try:
+            return self.buffer[self.pointer+index]
+        except IndexError:
+            self.update(index+1)
+            return self.buffer[self.pointer+index]
+
+    def prefix(self, length=1):
+        if self.pointer+length >= len(self.buffer):
+            self.update(length)
+        return self.buffer[self.pointer:self.pointer+length]
+
+    def forward(self, length=1):
+        if self.pointer+length+1 >= len(self.buffer):
+            self.update(length+1)
+        while length:
+            ch = self.buffer[self.pointer]
+            self.pointer += 1
+            self.index += 1
+            if ch in '\n\x85\u2028\u2029' \
+                    or (ch == '\r' and self.buffer[self.pointer] != '\n'):
+                self.line += 1
+                self.column = 0
+            elif ch != '\uFEFF':
+                self.column += 1
+            length -= 1
+
+    def get_mark(self):
+        if self.stream is None:
+            return Mark(self.name, self.index, self.line, self.column,
+                    self.buffer, self.pointer)
+        else:
+            return Mark(self.name, self.index, self.line, self.column,
+                    None, None)
+
+    def determine_encoding(self):
+        while not self.eof and (self.raw_buffer is None or len(self.raw_buffer) < 2):
+            self.update_raw()
+        if isinstance(self.raw_buffer, bytes):
+            if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
+                self.raw_decode = codecs.utf_16_le_decode
+                self.encoding = 'utf-16-le'
+            elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
+                self.raw_decode = codecs.utf_16_be_decode
+                self.encoding = 'utf-16-be'
+            else:
+                self.raw_decode = codecs.utf_8_decode
+                self.encoding = 'utf-8'
+        self.update(1)
+
+    NON_PRINTABLE = re.compile('[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD\U00010000-\U0010ffff]')
+    def check_printable(self, data):
+        match = self.NON_PRINTABLE.search(data)
+        if match:
+            character = match.group()
+            position = self.index+(len(self.buffer)-self.pointer)+match.start()
+            raise ReaderError(self.name, position, ord(character),
'unicode', "special characters are not allowed") + + def update(self, length): + if self.raw_buffer is None: + return + self.buffer = self.buffer[self.pointer:] + self.pointer = 0 + while len(self.buffer) < length: + if not self.eof: + self.update_raw() + if self.raw_decode is not None: + try: + data, converted = self.raw_decode(self.raw_buffer, + 'strict', self.eof) + except UnicodeDecodeError as exc: + character = self.raw_buffer[exc.start] + if self.stream is not None: + position = self.stream_pointer-len(self.raw_buffer)+exc.start + else: + position = exc.start + raise ReaderError(self.name, position, character, + exc.encoding, exc.reason) + else: + data = self.raw_buffer + converted = len(data) + self.check_printable(data) + self.buffer += data + self.raw_buffer = self.raw_buffer[converted:] + if self.eof: + self.buffer += '\0' + self.raw_buffer = None + break + + def update_raw(self, size=4096): + data = self.stream.read(size) + if self.raw_buffer is None: + self.raw_buffer = data + else: + self.raw_buffer += data + self.stream_pointer += len(data) + if not data: + self.eof = True diff --git a/venv/lib/python3.12/site-packages/yaml/representer.py b/venv/lib/python3.12/site-packages/yaml/representer.py new file mode 100644 index 00000000..808ca06d --- /dev/null +++ b/venv/lib/python3.12/site-packages/yaml/representer.py @@ -0,0 +1,389 @@ + +__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer', + 'RepresenterError'] + +from .error import * +from .nodes import * + +import datetime, copyreg, types, base64, collections + +class RepresenterError(YAMLError): + pass + +class BaseRepresenter: + + yaml_representers = {} + yaml_multi_representers = {} + + def __init__(self, default_style=None, default_flow_style=False, sort_keys=True): + self.default_style = default_style + self.sort_keys = sort_keys + self.default_flow_style = default_flow_style + self.represented_objects = {} + self.object_keeper = [] + self.alias_key = None + + def represent(self, data): + node = self.represent_data(data) + self.serialize(node) + self.represented_objects = {} + self.object_keeper = [] + self.alias_key = None + + def represent_data(self, data): + if self.ignore_aliases(data): + self.alias_key = None + else: + self.alias_key = id(data) + if self.alias_key is not None: + if self.alias_key in self.represented_objects: + node = self.represented_objects[self.alias_key] + #if node is None: + # raise RepresenterError("recursive objects are not allowed: %r" % data) + return node + #self.represented_objects[alias_key] = None + self.object_keeper.append(data) + data_types = type(data).__mro__ + if data_types[0] in self.yaml_representers: + node = self.yaml_representers[data_types[0]](self, data) + else: + for data_type in data_types: + if data_type in self.yaml_multi_representers: + node = self.yaml_multi_representers[data_type](self, data) + break + else: + if None in self.yaml_multi_representers: + node = self.yaml_multi_representers[None](self, data) + elif None in self.yaml_representers: + node = self.yaml_representers[None](self, data) + else: + node = ScalarNode(None, str(data)) + #if alias_key is not None: + # self.represented_objects[alias_key] = node + return node + + @classmethod + def add_representer(cls, data_type, representer): + if not 'yaml_representers' in cls.__dict__: + cls.yaml_representers = cls.yaml_representers.copy() + cls.yaml_representers[data_type] = representer + + @classmethod + def add_multi_representer(cls, data_type, representer): + if not 'yaml_multi_representers' in 
cls.__dict__: + cls.yaml_multi_representers = cls.yaml_multi_representers.copy() + cls.yaml_multi_representers[data_type] = representer + + def represent_scalar(self, tag, value, style=None): + if style is None: + style = self.default_style + node = ScalarNode(tag, value, style=style) + if self.alias_key is not None: + self.represented_objects[self.alias_key] = node + return node + + def represent_sequence(self, tag, sequence, flow_style=None): + value = [] + node = SequenceNode(tag, value, flow_style=flow_style) + if self.alias_key is not None: + self.represented_objects[self.alias_key] = node + best_style = True + for item in sequence: + node_item = self.represent_data(item) + if not (isinstance(node_item, ScalarNode) and not node_item.style): + best_style = False + value.append(node_item) + if flow_style is None: + if self.default_flow_style is not None: + node.flow_style = self.default_flow_style + else: + node.flow_style = best_style + return node + + def represent_mapping(self, tag, mapping, flow_style=None): + value = [] + node = MappingNode(tag, value, flow_style=flow_style) + if self.alias_key is not None: + self.represented_objects[self.alias_key] = node + best_style = True + if hasattr(mapping, 'items'): + mapping = list(mapping.items()) + if self.sort_keys: + try: + mapping = sorted(mapping) + except TypeError: + pass + for item_key, item_value in mapping: + node_key = self.represent_data(item_key) + node_value = self.represent_data(item_value) + if not (isinstance(node_key, ScalarNode) and not node_key.style): + best_style = False + if not (isinstance(node_value, ScalarNode) and not node_value.style): + best_style = False + value.append((node_key, node_value)) + if flow_style is None: + if self.default_flow_style is not None: + node.flow_style = self.default_flow_style + else: + node.flow_style = best_style + return node + + def ignore_aliases(self, data): + return False + +class SafeRepresenter(BaseRepresenter): + + def ignore_aliases(self, data): + if data is None: + return True + if isinstance(data, tuple) and data == (): + return True + if isinstance(data, (str, bytes, bool, int, float)): + return True + + def represent_none(self, data): + return self.represent_scalar('tag:yaml.org,2002:null', 'null') + + def represent_str(self, data): + return self.represent_scalar('tag:yaml.org,2002:str', data) + + def represent_binary(self, data): + if hasattr(base64, 'encodebytes'): + data = base64.encodebytes(data).decode('ascii') + else: + data = base64.encodestring(data).decode('ascii') + return self.represent_scalar('tag:yaml.org,2002:binary', data, style='|') + + def represent_bool(self, data): + if data: + value = 'true' + else: + value = 'false' + return self.represent_scalar('tag:yaml.org,2002:bool', value) + + def represent_int(self, data): + return self.represent_scalar('tag:yaml.org,2002:int', str(data)) + + inf_value = 1e300 + while repr(inf_value) != repr(inf_value*inf_value): + inf_value *= inf_value + + def represent_float(self, data): + if data != data or (data == 0.0 and data == 1.0): + value = '.nan' + elif data == self.inf_value: + value = '.inf' + elif data == -self.inf_value: + value = '-.inf' + else: + value = repr(data).lower() + # Note that in some cases `repr(data)` represents a float number + # without the decimal parts. For instance: + # >>> repr(1e17) + # '1e17' + # Unfortunately, this is not a valid float representation according + # to the definition of the `!!float` tag. We fix this by adding + # '.0' before the 'e' symbol. + if '.' 
not in value and 'e' in value: + value = value.replace('e', '.0e', 1) + return self.represent_scalar('tag:yaml.org,2002:float', value) + + def represent_list(self, data): + #pairs = (len(data) > 0 and isinstance(data, list)) + #if pairs: + # for item in data: + # if not isinstance(item, tuple) or len(item) != 2: + # pairs = False + # break + #if not pairs: + return self.represent_sequence('tag:yaml.org,2002:seq', data) + #value = [] + #for item_key, item_value in data: + # value.append(self.represent_mapping(u'tag:yaml.org,2002:map', + # [(item_key, item_value)])) + #return SequenceNode(u'tag:yaml.org,2002:pairs', value) + + def represent_dict(self, data): + return self.represent_mapping('tag:yaml.org,2002:map', data) + + def represent_set(self, data): + value = {} + for key in data: + value[key] = None + return self.represent_mapping('tag:yaml.org,2002:set', value) + + def represent_date(self, data): + value = data.isoformat() + return self.represent_scalar('tag:yaml.org,2002:timestamp', value) + + def represent_datetime(self, data): + value = data.isoformat(' ') + return self.represent_scalar('tag:yaml.org,2002:timestamp', value) + + def represent_yaml_object(self, tag, data, cls, flow_style=None): + if hasattr(data, '__getstate__'): + state = data.__getstate__() + else: + state = data.__dict__.copy() + return self.represent_mapping(tag, state, flow_style=flow_style) + + def represent_undefined(self, data): + raise RepresenterError("cannot represent an object", data) + +SafeRepresenter.add_representer(type(None), + SafeRepresenter.represent_none) + +SafeRepresenter.add_representer(str, + SafeRepresenter.represent_str) + +SafeRepresenter.add_representer(bytes, + SafeRepresenter.represent_binary) + +SafeRepresenter.add_representer(bool, + SafeRepresenter.represent_bool) + +SafeRepresenter.add_representer(int, + SafeRepresenter.represent_int) + +SafeRepresenter.add_representer(float, + SafeRepresenter.represent_float) + +SafeRepresenter.add_representer(list, + SafeRepresenter.represent_list) + +SafeRepresenter.add_representer(tuple, + SafeRepresenter.represent_list) + +SafeRepresenter.add_representer(dict, + SafeRepresenter.represent_dict) + +SafeRepresenter.add_representer(set, + SafeRepresenter.represent_set) + +SafeRepresenter.add_representer(datetime.date, + SafeRepresenter.represent_date) + +SafeRepresenter.add_representer(datetime.datetime, + SafeRepresenter.represent_datetime) + +SafeRepresenter.add_representer(None, + SafeRepresenter.represent_undefined) + +class Representer(SafeRepresenter): + + def represent_complex(self, data): + if data.imag == 0.0: + data = '%r' % data.real + elif data.real == 0.0: + data = '%rj' % data.imag + elif data.imag > 0: + data = '%r+%rj' % (data.real, data.imag) + else: + data = '%r%rj' % (data.real, data.imag) + return self.represent_scalar('tag:yaml.org,2002:python/complex', data) + + def represent_tuple(self, data): + return self.represent_sequence('tag:yaml.org,2002:python/tuple', data) + + def represent_name(self, data): + name = '%s.%s' % (data.__module__, data.__name__) + return self.represent_scalar('tag:yaml.org,2002:python/name:'+name, '') + + def represent_module(self, data): + return self.represent_scalar( + 'tag:yaml.org,2002:python/module:'+data.__name__, '') + + def represent_object(self, data): + # We use __reduce__ API to save the data. 
data.__reduce__ returns + # a tuple of length 2-5: + # (function, args, state, listitems, dictitems) + + # For reconstructing, we calls function(*args), then set its state, + # listitems, and dictitems if they are not None. + + # A special case is when function.__name__ == '__newobj__'. In this + # case we create the object with args[0].__new__(*args). + + # Another special case is when __reduce__ returns a string - we don't + # support it. + + # We produce a !!python/object, !!python/object/new or + # !!python/object/apply node. + + cls = type(data) + if cls in copyreg.dispatch_table: + reduce = copyreg.dispatch_table[cls](data) + elif hasattr(data, '__reduce_ex__'): + reduce = data.__reduce_ex__(2) + elif hasattr(data, '__reduce__'): + reduce = data.__reduce__() + else: + raise RepresenterError("cannot represent an object", data) + reduce = (list(reduce)+[None]*5)[:5] + function, args, state, listitems, dictitems = reduce + args = list(args) + if state is None: + state = {} + if listitems is not None: + listitems = list(listitems) + if dictitems is not None: + dictitems = dict(dictitems) + if function.__name__ == '__newobj__': + function = args[0] + args = args[1:] + tag = 'tag:yaml.org,2002:python/object/new:' + newobj = True + else: + tag = 'tag:yaml.org,2002:python/object/apply:' + newobj = False + function_name = '%s.%s' % (function.__module__, function.__name__) + if not args and not listitems and not dictitems \ + and isinstance(state, dict) and newobj: + return self.represent_mapping( + 'tag:yaml.org,2002:python/object:'+function_name, state) + if not listitems and not dictitems \ + and isinstance(state, dict) and not state: + return self.represent_sequence(tag+function_name, args) + value = {} + if args: + value['args'] = args + if state or not isinstance(state, dict): + value['state'] = state + if listitems: + value['listitems'] = listitems + if dictitems: + value['dictitems'] = dictitems + return self.represent_mapping(tag+function_name, value) + + def represent_ordered_dict(self, data): + # Provide uniform representation across different Python versions. 
+ data_type = type(data) + tag = 'tag:yaml.org,2002:python/object/apply:%s.%s' \ + % (data_type.__module__, data_type.__name__) + items = [[key, value] for key, value in data.items()] + return self.represent_sequence(tag, [items]) + +Representer.add_representer(complex, + Representer.represent_complex) + +Representer.add_representer(tuple, + Representer.represent_tuple) + +Representer.add_multi_representer(type, + Representer.represent_name) + +Representer.add_representer(collections.OrderedDict, + Representer.represent_ordered_dict) + +Representer.add_representer(types.FunctionType, + Representer.represent_name) + +Representer.add_representer(types.BuiltinFunctionType, + Representer.represent_name) + +Representer.add_representer(types.ModuleType, + Representer.represent_module) + +Representer.add_multi_representer(object, + Representer.represent_object) + diff --git a/venv/lib/python3.12/site-packages/yaml/resolver.py b/venv/lib/python3.12/site-packages/yaml/resolver.py new file mode 100644 index 00000000..3522bdaa --- /dev/null +++ b/venv/lib/python3.12/site-packages/yaml/resolver.py @@ -0,0 +1,227 @@ + +__all__ = ['BaseResolver', 'Resolver'] + +from .error import * +from .nodes import * + +import re + +class ResolverError(YAMLError): + pass + +class BaseResolver: + + DEFAULT_SCALAR_TAG = 'tag:yaml.org,2002:str' + DEFAULT_SEQUENCE_TAG = 'tag:yaml.org,2002:seq' + DEFAULT_MAPPING_TAG = 'tag:yaml.org,2002:map' + + yaml_implicit_resolvers = {} + yaml_path_resolvers = {} + + def __init__(self): + self.resolver_exact_paths = [] + self.resolver_prefix_paths = [] + + @classmethod + def add_implicit_resolver(cls, tag, regexp, first): + if not 'yaml_implicit_resolvers' in cls.__dict__: + implicit_resolvers = {} + for key in cls.yaml_implicit_resolvers: + implicit_resolvers[key] = cls.yaml_implicit_resolvers[key][:] + cls.yaml_implicit_resolvers = implicit_resolvers + if first is None: + first = [None] + for ch in first: + cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp)) + + @classmethod + def add_path_resolver(cls, tag, path, kind=None): + # Note: `add_path_resolver` is experimental. The API could be changed. + # `new_path` is a pattern that is matched against the path from the + # root to the node that is being considered. `node_path` elements are + # tuples `(node_check, index_check)`. `node_check` is a node class: + # `ScalarNode`, `SequenceNode`, `MappingNode` or `None`. `None` + # matches any kind of a node. `index_check` could be `None`, a boolean + # value, a string value, or a number. `None` and `False` match against + # any _value_ of sequence and mapping nodes. `True` matches against + # any _key_ of a mapping node. A string `index_check` matches against + # a mapping value that corresponds to a scalar key which content is + # equal to the `index_check` value. An integer `index_check` matches + # against a sequence value with the index equal to `index_check`. 
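An implicit resolver, registered per possible first character as above, is how plain scalars pick up tags without any markup. A hedged sketch using the public module-level helpers; the !frac tag and its pattern are invented for illustration, and they are consulted only after the built-in resolvers for the same first character fail to match:

import re
import yaml
from fractions import Fraction

# Hypothetical tag: resolve bare "3/4"-style scalars to !frac.
yaml.add_implicit_resolver('!frac',
        re.compile(r'^-?[0-9]+/[1-9][0-9]*$'),
        first=list('-0123456789'))
# The tag also needs a constructor, or loading will fail.
yaml.add_constructor('!frac',
        lambda loader, node: Fraction(node.value))

print(yaml.load('ratio: 3/4', Loader=yaml.FullLoader))
# {'ratio': Fraction(3, 4)}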
+ if not 'yaml_path_resolvers' in cls.__dict__: + cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy() + new_path = [] + for element in path: + if isinstance(element, (list, tuple)): + if len(element) == 2: + node_check, index_check = element + elif len(element) == 1: + node_check = element[0] + index_check = True + else: + raise ResolverError("Invalid path element: %s" % element) + else: + node_check = None + index_check = element + if node_check is str: + node_check = ScalarNode + elif node_check is list: + node_check = SequenceNode + elif node_check is dict: + node_check = MappingNode + elif node_check not in [ScalarNode, SequenceNode, MappingNode] \ + and not isinstance(node_check, str) \ + and node_check is not None: + raise ResolverError("Invalid node checker: %s" % node_check) + if not isinstance(index_check, (str, int)) \ + and index_check is not None: + raise ResolverError("Invalid index checker: %s" % index_check) + new_path.append((node_check, index_check)) + if kind is str: + kind = ScalarNode + elif kind is list: + kind = SequenceNode + elif kind is dict: + kind = MappingNode + elif kind not in [ScalarNode, SequenceNode, MappingNode] \ + and kind is not None: + raise ResolverError("Invalid node kind: %s" % kind) + cls.yaml_path_resolvers[tuple(new_path), kind] = tag + + def descend_resolver(self, current_node, current_index): + if not self.yaml_path_resolvers: + return + exact_paths = {} + prefix_paths = [] + if current_node: + depth = len(self.resolver_prefix_paths) + for path, kind in self.resolver_prefix_paths[-1]: + if self.check_resolver_prefix(depth, path, kind, + current_node, current_index): + if len(path) > depth: + prefix_paths.append((path, kind)) + else: + exact_paths[kind] = self.yaml_path_resolvers[path, kind] + else: + for path, kind in self.yaml_path_resolvers: + if not path: + exact_paths[kind] = self.yaml_path_resolvers[path, kind] + else: + prefix_paths.append((path, kind)) + self.resolver_exact_paths.append(exact_paths) + self.resolver_prefix_paths.append(prefix_paths) + + def ascend_resolver(self): + if not self.yaml_path_resolvers: + return + self.resolver_exact_paths.pop() + self.resolver_prefix_paths.pop() + + def check_resolver_prefix(self, depth, path, kind, + current_node, current_index): + node_check, index_check = path[depth-1] + if isinstance(node_check, str): + if current_node.tag != node_check: + return + elif node_check is not None: + if not isinstance(current_node, node_check): + return + if index_check is True and current_index is not None: + return + if (index_check is False or index_check is None) \ + and current_index is None: + return + if isinstance(index_check, str): + if not (isinstance(current_index, ScalarNode) + and index_check == current_index.value): + return + elif isinstance(index_check, int) and not isinstance(index_check, bool): + if index_check != current_index: + return + return True + + def resolve(self, kind, value, implicit): + if kind is ScalarNode and implicit[0]: + if value == '': + resolvers = self.yaml_implicit_resolvers.get('', []) + else: + resolvers = self.yaml_implicit_resolvers.get(value[0], []) + wildcard_resolvers = self.yaml_implicit_resolvers.get(None, []) + for tag, regexp in resolvers + wildcard_resolvers: + if regexp.match(value): + return tag + implicit = implicit[1] + if self.yaml_path_resolvers: + exact_paths = self.resolver_exact_paths[-1] + if kind in exact_paths: + return exact_paths[kind] + if None in exact_paths: + return exact_paths[None] + if kind is ScalarNode: + return 
self.DEFAULT_SCALAR_TAG + elif kind is SequenceNode: + return self.DEFAULT_SEQUENCE_TAG + elif kind is MappingNode: + return self.DEFAULT_MAPPING_TAG + +class Resolver(BaseResolver): + pass + +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:bool', + re.compile(r'''^(?:yes|Yes|YES|no|No|NO + |true|True|TRUE|false|False|FALSE + |on|On|ON|off|Off|OFF)$''', re.X), + list('yYnNtTfFoO')) + +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:float', + re.compile(r'''^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+][0-9]+)? + |\.[0-9][0-9_]*(?:[eE][-+][0-9]+)? + |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]* + |[-+]?\.(?:inf|Inf|INF) + |\.(?:nan|NaN|NAN))$''', re.X), + list('-+0123456789.')) + +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:int', + re.compile(r'''^(?:[-+]?0b[0-1_]+ + |[-+]?0[0-7_]+ + |[-+]?(?:0|[1-9][0-9_]*) + |[-+]?0x[0-9a-fA-F_]+ + |[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X), + list('-+0123456789')) + +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:merge', + re.compile(r'^(?:<<)$'), + ['<']) + +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:null', + re.compile(r'''^(?: ~ + |null|Null|NULL + | )$''', re.X), + ['~', 'n', 'N', '']) + +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:timestamp', + re.compile(r'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9] + |[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]? + (?:[Tt]|[ \t]+)[0-9][0-9]? + :[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)? + (?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X), + list('0123456789')) + +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:value', + re.compile(r'^(?:=)$'), + ['=']) + +# The following resolver is only for documentation purposes. It cannot work +# because plain scalars cannot start with '!', '&', or '*'. +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:yaml', + re.compile(r'^(?:!|&|\*)$'), + list('!&*')) + diff --git a/venv/lib/python3.12/site-packages/yaml/scanner.py b/venv/lib/python3.12/site-packages/yaml/scanner.py new file mode 100644 index 00000000..de925b07 --- /dev/null +++ b/venv/lib/python3.12/site-packages/yaml/scanner.py @@ -0,0 +1,1435 @@ + +# Scanner produces tokens of the following types: +# STREAM-START +# STREAM-END +# DIRECTIVE(name, value) +# DOCUMENT-START +# DOCUMENT-END +# BLOCK-SEQUENCE-START +# BLOCK-MAPPING-START +# BLOCK-END +# FLOW-SEQUENCE-START +# FLOW-MAPPING-START +# FLOW-SEQUENCE-END +# FLOW-MAPPING-END +# BLOCK-ENTRY +# FLOW-ENTRY +# KEY +# VALUE +# ALIAS(value) +# ANCHOR(value) +# TAG(value) +# SCALAR(value, plain, style) +# +# Read comments in the Scanner code for more details. +# + +__all__ = ['Scanner', 'ScannerError'] + +from .error import MarkedYAMLError +from .tokens import * + +class ScannerError(MarkedYAMLError): + pass + +class SimpleKey: + # See below simple keys treatment. + + def __init__(self, token_number, required, index, line, column, mark): + self.token_number = token_number + self.required = required + self.index = index + self.line = line + self.column = column + self.mark = mark + +class Scanner: + + def __init__(self): + """Initialize the scanner.""" + # It is assumed that Scanner and Reader will have a common descendant. + # Reader do the dirty work of checking for BOM and converting the + # input data to Unicode. It also adds NUL to the end. + # + # Reader supports the following methods + # self.peek(i=0) # peek the next i-th character + # self.prefix(l=1) # peek the next l characters + # self.forward(l=1) # read the next l characters and move the pointer. + + # Had we reached the end of the stream? 
+ self.done = False + + # The number of unclosed '{' and '['. `flow_level == 0` means block + # context. + self.flow_level = 0 + + # List of processed tokens that are not yet emitted. + self.tokens = [] + + # Add the STREAM-START token. + self.fetch_stream_start() + + # Number of tokens that were emitted through the `get_token` method. + self.tokens_taken = 0 + + # The current indentation level. + self.indent = -1 + + # Past indentation levels. + self.indents = [] + + # Variables related to simple keys treatment. + + # A simple key is a key that is not denoted by the '?' indicator. + # Example of simple keys: + # --- + # block simple key: value + # ? not a simple key: + # : { flow simple key: value } + # We emit the KEY token before all keys, so when we find a potential + # simple key, we try to locate the corresponding ':' indicator. + # Simple keys should be limited to a single line and 1024 characters. + + # Can a simple key start at the current position? A simple key may + # start: + # - at the beginning of the line, not counting indentation spaces + # (in block context), + # - after '{', '[', ',' (in the flow context), + # - after '?', ':', '-' (in the block context). + # In the block context, this flag also signifies if a block collection + # may start at the current position. + self.allow_simple_key = True + + # Keep track of possible simple keys. This is a dictionary. The key + # is `flow_level`; there can be no more that one possible simple key + # for each level. The value is a SimpleKey record: + # (token_number, required, index, line, column, mark) + # A simple key may start with ALIAS, ANCHOR, TAG, SCALAR(flow), + # '[', or '{' tokens. + self.possible_simple_keys = {} + + # Public methods. + + def check_token(self, *choices): + # Check if the next token is one of the given types. + while self.need_more_tokens(): + self.fetch_more_tokens() + if self.tokens: + if not choices: + return True + for choice in choices: + if isinstance(self.tokens[0], choice): + return True + return False + + def peek_token(self): + # Return the next token, but do not delete if from the queue. + # Return None if no more tokens. + while self.need_more_tokens(): + self.fetch_more_tokens() + if self.tokens: + return self.tokens[0] + else: + return None + + def get_token(self): + # Return the next token. + while self.need_more_tokens(): + self.fetch_more_tokens() + if self.tokens: + self.tokens_taken += 1 + return self.tokens.pop(0) + + # Private methods. + + def need_more_tokens(self): + if self.done: + return False + if not self.tokens: + return True + # The current token may be a potential simple key, so we + # need to look further. + self.stale_possible_simple_keys() + if self.next_possible_simple_key() == self.tokens_taken: + return True + + def fetch_more_tokens(self): + + # Eat whitespaces and comments until we reach the next token. + self.scan_to_next_token() + + # Remove obsolete possible simple keys. + self.stale_possible_simple_keys() + + # Compare the current indentation and column. It may add some tokens + # and decrease the current indentation level. + self.unwind_indent(self.column) + + # Peek the next character. + ch = self.peek() + + # Is it the end of stream? + if ch == '\0': + return self.fetch_stream_end() + + # Is it a directive? + if ch == '%' and self.check_directive(): + return self.fetch_directive() + + # Is it the document start? + if ch == '-' and self.check_document_start(): + return self.fetch_document_start() + + # Is it the document end? + if ch == '.' 
and self.check_document_end(): + return self.fetch_document_end() + + # TODO: support for BOM within a stream. + #if ch == '\uFEFF': + # return self.fetch_bom() <-- issue BOMToken + + # Note: the order of the following checks is NOT significant. + + # Is it the flow sequence start indicator? + if ch == '[': + return self.fetch_flow_sequence_start() + + # Is it the flow mapping start indicator? + if ch == '{': + return self.fetch_flow_mapping_start() + + # Is it the flow sequence end indicator? + if ch == ']': + return self.fetch_flow_sequence_end() + + # Is it the flow mapping end indicator? + if ch == '}': + return self.fetch_flow_mapping_end() + + # Is it the flow entry indicator? + if ch == ',': + return self.fetch_flow_entry() + + # Is it the block entry indicator? + if ch == '-' and self.check_block_entry(): + return self.fetch_block_entry() + + # Is it the key indicator? + if ch == '?' and self.check_key(): + return self.fetch_key() + + # Is it the value indicator? + if ch == ':' and self.check_value(): + return self.fetch_value() + + # Is it an alias? + if ch == '*': + return self.fetch_alias() + + # Is it an anchor? + if ch == '&': + return self.fetch_anchor() + + # Is it a tag? + if ch == '!': + return self.fetch_tag() + + # Is it a literal scalar? + if ch == '|' and not self.flow_level: + return self.fetch_literal() + + # Is it a folded scalar? + if ch == '>' and not self.flow_level: + return self.fetch_folded() + + # Is it a single quoted scalar? + if ch == '\'': + return self.fetch_single() + + # Is it a double quoted scalar? + if ch == '\"': + return self.fetch_double() + + # It must be a plain scalar then. + if self.check_plain(): + return self.fetch_plain() + + # No? It's an error. Let's produce a nice error message. + raise ScannerError("while scanning for the next token", None, + "found character %r that cannot start any token" % ch, + self.get_mark()) + + # Simple keys treatment. + + def next_possible_simple_key(self): + # Return the number of the nearest possible simple key. Actually we + # don't need to loop through the whole dictionary. We may replace it + # with the following code: + # if not self.possible_simple_keys: + # return None + # return self.possible_simple_keys[ + # min(self.possible_simple_keys.keys())].token_number + min_token_number = None + for level in self.possible_simple_keys: + key = self.possible_simple_keys[level] + if min_token_number is None or key.token_number < min_token_number: + min_token_number = key.token_number + return min_token_number + + def stale_possible_simple_keys(self): + # Remove entries that are no longer possible simple keys. According to + # the YAML specification, simple keys + # - should be limited to a single line, + # - should be no longer than 1024 characters. + # Disabling this procedure will allow simple keys of any length and + # height (may cause problems if indentation is broken though). + for level in list(self.possible_simple_keys): + key = self.possible_simple_keys[level] + if key.line != self.line \ + or self.index-key.index > 1024: + if key.required: + raise ScannerError("while scanning a simple key", key.mark, + "could not find expected ':'", self.get_mark()) + del self.possible_simple_keys[level] + + def save_possible_simple_key(self): + # The next token may start a simple key. We check if it's possible + # and save its position. This function is called for + # ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'. + + # Check if a simple key is required at the current position. 
+ required = not self.flow_level and self.indent == self.column + + # The next token might be a simple key. Let's save it's number and + # position. + if self.allow_simple_key: + self.remove_possible_simple_key() + token_number = self.tokens_taken+len(self.tokens) + key = SimpleKey(token_number, required, + self.index, self.line, self.column, self.get_mark()) + self.possible_simple_keys[self.flow_level] = key + + def remove_possible_simple_key(self): + # Remove the saved possible key position at the current flow level. + if self.flow_level in self.possible_simple_keys: + key = self.possible_simple_keys[self.flow_level] + + if key.required: + raise ScannerError("while scanning a simple key", key.mark, + "could not find expected ':'", self.get_mark()) + + del self.possible_simple_keys[self.flow_level] + + # Indentation functions. + + def unwind_indent(self, column): + + ## In flow context, tokens should respect indentation. + ## Actually the condition should be `self.indent >= column` according to + ## the spec. But this condition will prohibit intuitively correct + ## constructions such as + ## key : { + ## } + #if self.flow_level and self.indent > column: + # raise ScannerError(None, None, + # "invalid indentation or unclosed '[' or '{'", + # self.get_mark()) + + # In the flow context, indentation is ignored. We make the scanner less + # restrictive then specification requires. + if self.flow_level: + return + + # In block context, we may need to issue the BLOCK-END tokens. + while self.indent > column: + mark = self.get_mark() + self.indent = self.indents.pop() + self.tokens.append(BlockEndToken(mark, mark)) + + def add_indent(self, column): + # Check if we need to increase indentation. + if self.indent < column: + self.indents.append(self.indent) + self.indent = column + return True + return False + + # Fetchers. + + def fetch_stream_start(self): + # We always add STREAM-START as the first token and STREAM-END as the + # last token. + + # Read the token. + mark = self.get_mark() + + # Add STREAM-START. + self.tokens.append(StreamStartToken(mark, mark, + encoding=self.encoding)) + + + def fetch_stream_end(self): + + # Set the current indentation to -1. + self.unwind_indent(-1) + + # Reset simple keys. + self.remove_possible_simple_key() + self.allow_simple_key = False + self.possible_simple_keys = {} + + # Read the token. + mark = self.get_mark() + + # Add STREAM-END. + self.tokens.append(StreamEndToken(mark, mark)) + + # The steam is finished. + self.done = True + + def fetch_directive(self): + + # Set the current indentation to -1. + self.unwind_indent(-1) + + # Reset simple keys. + self.remove_possible_simple_key() + self.allow_simple_key = False + + # Scan and add DIRECTIVE. + self.tokens.append(self.scan_directive()) + + def fetch_document_start(self): + self.fetch_document_indicator(DocumentStartToken) + + def fetch_document_end(self): + self.fetch_document_indicator(DocumentEndToken) + + def fetch_document_indicator(self, TokenClass): + + # Set the current indentation to -1. + self.unwind_indent(-1) + + # Reset simple keys. Note that there could not be a block collection + # after '---'. + self.remove_possible_simple_key() + self.allow_simple_key = False + + # Add DOCUMENT-START or DOCUMENT-END. 
+ start_mark = self.get_mark() + self.forward(3) + end_mark = self.get_mark() + self.tokens.append(TokenClass(start_mark, end_mark)) + + def fetch_flow_sequence_start(self): + self.fetch_flow_collection_start(FlowSequenceStartToken) + + def fetch_flow_mapping_start(self): + self.fetch_flow_collection_start(FlowMappingStartToken) + + def fetch_flow_collection_start(self, TokenClass): + + # '[' and '{' may start a simple key. + self.save_possible_simple_key() + + # Increase the flow level. + self.flow_level += 1 + + # Simple keys are allowed after '[' and '{'. + self.allow_simple_key = True + + # Add FLOW-SEQUENCE-START or FLOW-MAPPING-START. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(TokenClass(start_mark, end_mark)) + + def fetch_flow_sequence_end(self): + self.fetch_flow_collection_end(FlowSequenceEndToken) + + def fetch_flow_mapping_end(self): + self.fetch_flow_collection_end(FlowMappingEndToken) + + def fetch_flow_collection_end(self, TokenClass): + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Decrease the flow level. + self.flow_level -= 1 + + # No simple keys after ']' or '}'. + self.allow_simple_key = False + + # Add FLOW-SEQUENCE-END or FLOW-MAPPING-END. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(TokenClass(start_mark, end_mark)) + + def fetch_flow_entry(self): + + # Simple keys are allowed after ','. + self.allow_simple_key = True + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Add FLOW-ENTRY. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(FlowEntryToken(start_mark, end_mark)) + + def fetch_block_entry(self): + + # Block context needs additional checks. + if not self.flow_level: + + # Are we allowed to start a new entry? + if not self.allow_simple_key: + raise ScannerError(None, None, + "sequence entries are not allowed here", + self.get_mark()) + + # We may need to add BLOCK-SEQUENCE-START. + if self.add_indent(self.column): + mark = self.get_mark() + self.tokens.append(BlockSequenceStartToken(mark, mark)) + + # It's an error for the block entry to occur in the flow context, + # but we let the parser detect this. + else: + pass + + # Simple keys are allowed after '-'. + self.allow_simple_key = True + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Add BLOCK-ENTRY. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(BlockEntryToken(start_mark, end_mark)) + + def fetch_key(self): + + # Block context needs additional checks. + if not self.flow_level: + + # Are we allowed to start a key (not necessary a simple)? + if not self.allow_simple_key: + raise ScannerError(None, None, + "mapping keys are not allowed here", + self.get_mark()) + + # We may need to add BLOCK-MAPPING-START. + if self.add_indent(self.column): + mark = self.get_mark() + self.tokens.append(BlockMappingStartToken(mark, mark)) + + # Simple keys are allowed after '?' in the block context. + self.allow_simple_key = not self.flow_level + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Add KEY. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(KeyToken(start_mark, end_mark)) + + def fetch_value(self): + + # Do we determine a simple key? 
+ if self.flow_level in self.possible_simple_keys: + + # Add KEY. + key = self.possible_simple_keys[self.flow_level] + del self.possible_simple_keys[self.flow_level] + self.tokens.insert(key.token_number-self.tokens_taken, + KeyToken(key.mark, key.mark)) + + # If this key starts a new block mapping, we need to add + # BLOCK-MAPPING-START. + if not self.flow_level: + if self.add_indent(key.column): + self.tokens.insert(key.token_number-self.tokens_taken, + BlockMappingStartToken(key.mark, key.mark)) + + # There cannot be two simple keys one after another. + self.allow_simple_key = False + + # It must be a part of a complex key. + else: + + # Block context needs additional checks. + # (Do we really need them? They will be caught by the parser + # anyway.) + if not self.flow_level: + + # We are allowed to start a complex value if and only if + # we can start a simple key. + if not self.allow_simple_key: + raise ScannerError(None, None, + "mapping values are not allowed here", + self.get_mark()) + + # If this value starts a new block mapping, we need to add + # BLOCK-MAPPING-START. It will be detected as an error later by + # the parser. + if not self.flow_level: + if self.add_indent(self.column): + mark = self.get_mark() + self.tokens.append(BlockMappingStartToken(mark, mark)) + + # Simple keys are allowed after ':' in the block context. + self.allow_simple_key = not self.flow_level + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Add VALUE. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(ValueToken(start_mark, end_mark)) + + def fetch_alias(self): + + # ALIAS could be a simple key. + self.save_possible_simple_key() + + # No simple keys after ALIAS. + self.allow_simple_key = False + + # Scan and add ALIAS. + self.tokens.append(self.scan_anchor(AliasToken)) + + def fetch_anchor(self): + + # ANCHOR could start a simple key. + self.save_possible_simple_key() + + # No simple keys after ANCHOR. + self.allow_simple_key = False + + # Scan and add ANCHOR. + self.tokens.append(self.scan_anchor(AnchorToken)) + + def fetch_tag(self): + + # TAG could start a simple key. + self.save_possible_simple_key() + + # No simple keys after TAG. + self.allow_simple_key = False + + # Scan and add TAG. + self.tokens.append(self.scan_tag()) + + def fetch_literal(self): + self.fetch_block_scalar(style='|') + + def fetch_folded(self): + self.fetch_block_scalar(style='>') + + def fetch_block_scalar(self, style): + + # A simple key may follow a block scalar. + self.allow_simple_key = True + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Scan and add SCALAR. + self.tokens.append(self.scan_block_scalar(style)) + + def fetch_single(self): + self.fetch_flow_scalar(style='\'') + + def fetch_double(self): + self.fetch_flow_scalar(style='"') + + def fetch_flow_scalar(self, style): + + # A flow scalar could be a simple key. + self.save_possible_simple_key() + + # No simple keys after flow scalars. + self.allow_simple_key = False + + # Scan and add SCALAR. + self.tokens.append(self.scan_flow_scalar(style)) + + def fetch_plain(self): + + # A plain scalar could be a simple key. + self.save_possible_simple_key() + + # No simple keys after plain scalars. But note that `scan_plain` will + # change this flag if the scan is finished at the beginning of the + # line. + self.allow_simple_key = False + + # Scan and add SCALAR. May change `allow_simple_key`. 
+        self.tokens.append(self.scan_plain())
+
+    # Checkers.
+
+    def check_directive(self):
+
+        # DIRECTIVE:        ^ '%' ...
+        # The '%' indicator is already checked.
+        if self.column == 0:
+            return True
+
+    def check_document_start(self):
+
+        # DOCUMENT-START:   ^ '---' (' '|'\n')
+        if self.column == 0:
+            if self.prefix(3) == '---' \
+                    and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+                return True
+
+    def check_document_end(self):
+
+        # DOCUMENT-END:     ^ '...' (' '|'\n')
+        if self.column == 0:
+            if self.prefix(3) == '...' \
+                    and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+                return True
+
+    def check_block_entry(self):
+
+        # BLOCK-ENTRY:      '-' (' '|'\n')
+        return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029'
+
+    def check_key(self):
+
+        # KEY(flow context):    '?'
+        if self.flow_level:
+            return True
+
+        # KEY(block context):   '?' (' '|'\n')
+        else:
+            return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029'
+
+    def check_value(self):
+
+        # VALUE(flow context):      ':'
+        if self.flow_level:
+            return True
+
+        # VALUE(block context):     ':' (' '|'\n')
+        else:
+            return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029'
+
+    def check_plain(self):
+
+        # A plain scalar may start with any non-space character except:
+        #   '-', '?', ':', ',', '[', ']', '{', '}',
+        #   '#', '&', '*', '!', '|', '>', '\'', '\"',
+        #   '%', '@', '`'.
+        #
+        # It may also start with
+        #   '-', '?', ':'
+        # if it is followed by a non-space character.
+        #
+        # Note that we limit the last rule to the block context (except the
+        # '-' character) because we want the flow context to be space
+        # independent.
+        ch = self.peek()
+        return ch not in '\0 \t\r\n\x85\u2028\u2029-?:,[]{}#&*!|>\'\"%@`' \
+                or (self.peek(1) not in '\0 \t\r\n\x85\u2028\u2029'
+                        and (ch == '-' or (not self.flow_level and ch in '?:')))
+
+    # Scanners.
+
+    def scan_to_next_token(self):
+        # We ignore spaces, line breaks and comments.
+        # If we find a line break in the block context, we set the flag
+        # `allow_simple_key` on.
+        # The byte order mark is stripped if it's the first character in the
+        # stream. We do not yet support BOM inside the stream as the
+        # specification requires. Any such mark will be considered as a part
+        # of the document.
+        #
+        # TODO: We need to make tab handling rules more sane. A good rule is
+        #   Tabs cannot precede tokens
+        #   BLOCK-SEQUENCE-START, BLOCK-MAPPING-START, BLOCK-END,
+        #   KEY(block), VALUE(block), BLOCK-ENTRY
+        # So the checking code is
+        #   if <TAB>:
+        #       self.allow_simple_keys = False
+        # We also need to add the check for `allow_simple_keys == True` to
+        # `unwind_indent` before issuing BLOCK-END.
+        # Scanners for block, flow, and plain scalars need to be modified.
+
+        if self.index == 0 and self.peek() == '\uFEFF':
+            self.forward()
+        found = False
+        while not found:
+            while self.peek() == ' ':
+                self.forward()
+            if self.peek() == '#':
+                while self.peek() not in '\0\r\n\x85\u2028\u2029':
+                    self.forward()
+            if self.scan_line_break():
+                if not self.flow_level:
+                    self.allow_simple_key = True
+            else:
+                found = True
+
+    def scan_directive(self):
+        # See the specification for details.
+ start_mark = self.get_mark() + self.forward() + name = self.scan_directive_name(start_mark) + value = None + if name == 'YAML': + value = self.scan_yaml_directive_value(start_mark) + end_mark = self.get_mark() + elif name == 'TAG': + value = self.scan_tag_directive_value(start_mark) + end_mark = self.get_mark() + else: + end_mark = self.get_mark() + while self.peek() not in '\0\r\n\x85\u2028\u2029': + self.forward() + self.scan_directive_ignored_line(start_mark) + return DirectiveToken(name, value, start_mark, end_mark) + + def scan_directive_name(self, start_mark): + # See the specification for details. + length = 0 + ch = self.peek(length) + while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ + or ch in '-_': + length += 1 + ch = self.peek(length) + if not length: + raise ScannerError("while scanning a directive", start_mark, + "expected alphabetic or numeric character, but found %r" + % ch, self.get_mark()) + value = self.prefix(length) + self.forward(length) + ch = self.peek() + if ch not in '\0 \r\n\x85\u2028\u2029': + raise ScannerError("while scanning a directive", start_mark, + "expected alphabetic or numeric character, but found %r" + % ch, self.get_mark()) + return value + + def scan_yaml_directive_value(self, start_mark): + # See the specification for details. + while self.peek() == ' ': + self.forward() + major = self.scan_yaml_directive_number(start_mark) + if self.peek() != '.': + raise ScannerError("while scanning a directive", start_mark, + "expected a digit or '.', but found %r" % self.peek(), + self.get_mark()) + self.forward() + minor = self.scan_yaml_directive_number(start_mark) + if self.peek() not in '\0 \r\n\x85\u2028\u2029': + raise ScannerError("while scanning a directive", start_mark, + "expected a digit or ' ', but found %r" % self.peek(), + self.get_mark()) + return (major, minor) + + def scan_yaml_directive_number(self, start_mark): + # See the specification for details. + ch = self.peek() + if not ('0' <= ch <= '9'): + raise ScannerError("while scanning a directive", start_mark, + "expected a digit, but found %r" % ch, self.get_mark()) + length = 0 + while '0' <= self.peek(length) <= '9': + length += 1 + value = int(self.prefix(length)) + self.forward(length) + return value + + def scan_tag_directive_value(self, start_mark): + # See the specification for details. + while self.peek() == ' ': + self.forward() + handle = self.scan_tag_directive_handle(start_mark) + while self.peek() == ' ': + self.forward() + prefix = self.scan_tag_directive_prefix(start_mark) + return (handle, prefix) + + def scan_tag_directive_handle(self, start_mark): + # See the specification for details. + value = self.scan_tag_handle('directive', start_mark) + ch = self.peek() + if ch != ' ': + raise ScannerError("while scanning a directive", start_mark, + "expected ' ', but found %r" % ch, self.get_mark()) + return value + + def scan_tag_directive_prefix(self, start_mark): + # See the specification for details. + value = self.scan_tag_uri('directive', start_mark) + ch = self.peek() + if ch not in '\0 \r\n\x85\u2028\u2029': + raise ScannerError("while scanning a directive", start_mark, + "expected ' ', but found %r" % ch, self.get_mark()) + return value + + def scan_directive_ignored_line(self, start_mark): + # See the specification for details. 
+        while self.peek() == ' ':
+            self.forward()
+        if self.peek() == '#':
+            while self.peek() not in '\0\r\n\x85\u2028\u2029':
+                self.forward()
+        ch = self.peek()
+        if ch not in '\0\r\n\x85\u2028\u2029':
+            raise ScannerError("while scanning a directive", start_mark,
+                    "expected a comment or a line break, but found %r"
+                    % ch, self.get_mark())
+        self.scan_line_break()
+
+    def scan_anchor(self, TokenClass):
+        # The specification does not restrict characters for anchors and
+        # aliases. This may lead to problems, for instance, the document:
+        #   [ *alias, value ]
+        # can be interpreted in two ways, as
+        #   [ "value" ]
+        # and
+        #   [ *alias , "value" ]
+        # Therefore we restrict aliases to numbers and ASCII letters.
+        start_mark = self.get_mark()
+        indicator = self.peek()
+        if indicator == '*':
+            name = 'alias'
+        else:
+            name = 'anchor'
+        self.forward()
+        length = 0
+        ch = self.peek(length)
+        while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z'  \
+                or ch in '-_':
+            length += 1
+            ch = self.peek(length)
+        if not length:
+            raise ScannerError("while scanning an %s" % name, start_mark,
+                    "expected alphabetic or numeric character, but found %r"
+                    % ch, self.get_mark())
+        value = self.prefix(length)
+        self.forward(length)
+        ch = self.peek()
+        if ch not in '\0 \t\r\n\x85\u2028\u2029?:,]}%@`':
+            raise ScannerError("while scanning an %s" % name, start_mark,
+                    "expected alphabetic or numeric character, but found %r"
+                    % ch, self.get_mark())
+        end_mark = self.get_mark()
+        return TokenClass(value, start_mark, end_mark)
+
+    def scan_tag(self):
+        # See the specification for details.
+        start_mark = self.get_mark()
+        ch = self.peek(1)
+        if ch == '<':
+            handle = None
+            self.forward(2)
+            suffix = self.scan_tag_uri('tag', start_mark)
+            if self.peek() != '>':
+                raise ScannerError("while parsing a tag", start_mark,
+                        "expected '>', but found %r" % self.peek(),
+                        self.get_mark())
+            self.forward()
+        elif ch in '\0 \t\r\n\x85\u2028\u2029':
+            handle = None
+            suffix = '!'
+            self.forward()
+        else:
+            length = 1
+            use_handle = False
+            while ch not in '\0 \r\n\x85\u2028\u2029':
+                if ch == '!':
+                    use_handle = True
+                    break
+                length += 1
+                ch = self.peek(length)
+            handle = '!'
+            if use_handle:
+                handle = self.scan_tag_handle('tag', start_mark)
+            else:
+                handle = '!'
+                self.forward()
+            suffix = self.scan_tag_uri('tag', start_mark)
+        ch = self.peek()
+        if ch not in '\0 \r\n\x85\u2028\u2029':
+            raise ScannerError("while scanning a tag", start_mark,
+                    "expected ' ', but found %r" % ch, self.get_mark())
+        value = (handle, suffix)
+        end_mark = self.get_mark()
+        return TagToken(value, start_mark, end_mark)
+
+    def scan_block_scalar(self, style):
+        # See the specification for details.
+
+        if style == '>':
+            folded = True
+        else:
+            folded = False
+
+        chunks = []
+        start_mark = self.get_mark()
+
+        # Scan the header.
+        self.forward()
+        chomping, increment = self.scan_block_scalar_indicators(start_mark)
+        self.scan_block_scalar_ignored_line(start_mark)
+
+        # Determine the indentation level and go to the first non-empty line.
+        min_indent = self.indent+1
+        if min_indent < 1:
+            min_indent = 1
+        if increment is None:
+            breaks, max_indent, end_mark = self.scan_block_scalar_indentation()
+            indent = max(min_indent, max_indent)
+        else:
+            indent = min_indent+increment-1
+            breaks, end_mark = self.scan_block_scalar_breaks(indent)
+        line_break = ''
+
+        # Scan the inner part of the block scalar.
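+        # For example, the folded scalar
+        #   >
+        #     one
+        #     two
+        # folds the single break between "one" and "two" into a space and,
+        # under the default "clip" chomping, keeps one trailing break,
+        # yielding "one two\n".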
+        while self.column == indent and self.peek() != '\0':
+            chunks.extend(breaks)
+            leading_non_space = self.peek() not in ' \t'
+            length = 0
+            while self.peek(length) not in '\0\r\n\x85\u2028\u2029':
+                length += 1
+            chunks.append(self.prefix(length))
+            self.forward(length)
+            line_break = self.scan_line_break()
+            breaks, end_mark = self.scan_block_scalar_breaks(indent)
+            if self.column == indent and self.peek() != '\0':
+
+                # Unfortunately, folding rules are ambiguous.
+                #
+                # This is the folding according to the specification:
+
+                if folded and line_break == '\n'    \
+                        and leading_non_space and self.peek() not in ' \t':
+                    if not breaks:
+                        chunks.append(' ')
+                else:
+                    chunks.append(line_break)
+
+                # This is Clark Evans's interpretation (also in the spec
+                # examples):
+                #
+                #if folded and line_break == '\n':
+                #    if not breaks:
+                #        if self.peek() not in ' \t':
+                #            chunks.append(' ')
+                #        else:
+                #            chunks.append(line_break)
+                #else:
+                #    chunks.append(line_break)
+            else:
+                break
+
+        # Chomp the tail.
+        if chomping is not False:
+            chunks.append(line_break)
+        if chomping is True:
+            chunks.extend(breaks)
+
+        # We are done.
+        return ScalarToken(''.join(chunks), False, start_mark, end_mark,
+                style)
+
+    def scan_block_scalar_indicators(self, start_mark):
+        # See the specification for details.
+        chomping = None
+        increment = None
+        ch = self.peek()
+        if ch in '+-':
+            if ch == '+':
+                chomping = True
+            else:
+                chomping = False
+            self.forward()
+            ch = self.peek()
+            if ch in '0123456789':
+                increment = int(ch)
+                if increment == 0:
+                    raise ScannerError("while scanning a block scalar", start_mark,
+                            "expected indentation indicator in the range 1-9, but found 0",
+                            self.get_mark())
+                self.forward()
+        elif ch in '0123456789':
+            increment = int(ch)
+            if increment == 0:
+                raise ScannerError("while scanning a block scalar", start_mark,
+                        "expected indentation indicator in the range 1-9, but found 0",
+                        self.get_mark())
+            self.forward()
+            ch = self.peek()
+            if ch in '+-':
+                if ch == '+':
+                    chomping = True
+                else:
+                    chomping = False
+                self.forward()
+        ch = self.peek()
+        if ch not in '\0 \r\n\x85\u2028\u2029':
+            raise ScannerError("while scanning a block scalar", start_mark,
+                    "expected chomping or indentation indicators, but found %r"
+                    % ch, self.get_mark())
+        return chomping, increment
+
+    def scan_block_scalar_ignored_line(self, start_mark):
+        # See the specification for details.
+        while self.peek() == ' ':
+            self.forward()
+        if self.peek() == '#':
+            while self.peek() not in '\0\r\n\x85\u2028\u2029':
+                self.forward()
+        ch = self.peek()
+        if ch not in '\0\r\n\x85\u2028\u2029':
+            raise ScannerError("while scanning a block scalar", start_mark,
+                    "expected a comment or a line break, but found %r" % ch,
+                    self.get_mark())
+        self.scan_line_break()
+
+    def scan_block_scalar_indentation(self):
+        # See the specification for details.
+        chunks = []
+        max_indent = 0
+        end_mark = self.get_mark()
+        while self.peek() in ' \r\n\x85\u2028\u2029':
+            if self.peek() != ' ':
+                chunks.append(self.scan_line_break())
+                end_mark = self.get_mark()
+            else:
+                self.forward()
+                if self.column > max_indent:
+                    max_indent = self.column
+        return chunks, max_indent, end_mark
+
+    def scan_block_scalar_breaks(self, indent):
+        # See the specification for details.
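+        # Skips indentation (up to `indent` spaces) on each line and collects
+        # the line breaks of empty lines, so the caller can decide whether
+        # they fold into content or get chomped.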
+        chunks = []
+        end_mark = self.get_mark()
+        while self.column < indent and self.peek() == ' ':
+            self.forward()
+        while self.peek() in '\r\n\x85\u2028\u2029':
+            chunks.append(self.scan_line_break())
+            end_mark = self.get_mark()
+            while self.column < indent and self.peek() == ' ':
+                self.forward()
+        return chunks, end_mark
+
+    def scan_flow_scalar(self, style):
+        # See the specification for details.
+        # Note that we relax indentation rules for quoted scalars. Quoted
+        # scalars don't need to adhere to indentation because " and ' clearly
+        # mark the beginning and the end of them. Therefore we are less
+        # restrictive than the specification requires. We only need to check
+        # that document separators are not included in scalars.
+        if style == '"':
+            double = True
+        else:
+            double = False
+        chunks = []
+        start_mark = self.get_mark()
+        quote = self.peek()
+        self.forward()
+        chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark))
+        while self.peek() != quote:
+            chunks.extend(self.scan_flow_scalar_spaces(double, start_mark))
+            chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark))
+        self.forward()
+        end_mark = self.get_mark()
+        return ScalarToken(''.join(chunks), False, start_mark, end_mark,
+                style)
+
+    ESCAPE_REPLACEMENTS = {
+        '0':    '\0',
+        'a':    '\x07',
+        'b':    '\x08',
+        't':    '\x09',
+        '\t':   '\x09',
+        'n':    '\x0A',
+        'v':    '\x0B',
+        'f':    '\x0C',
+        'r':    '\x0D',
+        'e':    '\x1B',
+        ' ':    '\x20',
+        '\"':   '\"',
+        '\\':   '\\',
+        '/':    '/',
+        'N':    '\x85',
+        '_':    '\xA0',
+        'L':    '\u2028',
+        'P':    '\u2029',
+    }
+
+    ESCAPE_CODES = {
+        'x':    2,
+        'u':    4,
+        'U':    8,
+    }
+
+    def scan_flow_scalar_non_spaces(self, double, start_mark):
+        # See the specification for details.
+        chunks = []
+        while True:
+            length = 0
+            while self.peek(length) not in '\'\"\\\0 \t\r\n\x85\u2028\u2029':
+                length += 1
+            if length:
+                chunks.append(self.prefix(length))
+                self.forward(length)
+            ch = self.peek()
+            if not double and ch == '\'' and self.peek(1) == '\'':
+                chunks.append('\'')
+                self.forward(2)
+            elif (double and ch == '\'') or (not double and ch in '\"\\'):
+                chunks.append(ch)
+                self.forward()
+            elif double and ch == '\\':
+                self.forward()
+                ch = self.peek()
+                if ch in self.ESCAPE_REPLACEMENTS:
+                    chunks.append(self.ESCAPE_REPLACEMENTS[ch])
+                    self.forward()
+                elif ch in self.ESCAPE_CODES:
+                    length = self.ESCAPE_CODES[ch]
+                    self.forward()
+                    for k in range(length):
+                        if self.peek(k) not in '0123456789ABCDEFabcdef':
+                            raise ScannerError("while scanning a double-quoted scalar", start_mark,
+                                    "expected escape sequence of %d hexadecimal numbers, but found %r" %
+                                        (length, self.peek(k)), self.get_mark())
+                    code = int(self.prefix(length), 16)
+                    chunks.append(chr(code))
+                    self.forward(length)
+                elif ch in '\r\n\x85\u2028\u2029':
+                    self.scan_line_break()
+                    chunks.extend(self.scan_flow_scalar_breaks(double, start_mark))
+                else:
+                    raise ScannerError("while scanning a double-quoted scalar", start_mark,
+                            "found unknown escape character %r" % ch, self.get_mark())
+            else:
+                return chunks
+
+    def scan_flow_scalar_spaces(self, double, start_mark):
+        # See the specification for details.
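+        # Handles line folding inside quoted scalars: a single line break is
+        # folded into a space ("one\n two" reads as "one two"), while empty
+        # lines are preserved as '\n' characters.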
+        chunks = []
+        length = 0
+        while self.peek(length) in ' \t':
+            length += 1
+        whitespaces = self.prefix(length)
+        self.forward(length)
+        ch = self.peek()
+        if ch == '\0':
+            raise ScannerError("while scanning a quoted scalar", start_mark,
+                    "found unexpected end of stream", self.get_mark())
+        elif ch in '\r\n\x85\u2028\u2029':
+            line_break = self.scan_line_break()
+            breaks = self.scan_flow_scalar_breaks(double, start_mark)
+            if line_break != '\n':
+                chunks.append(line_break)
+            elif not breaks:
+                chunks.append(' ')
+            chunks.extend(breaks)
+        else:
+            chunks.append(whitespaces)
+        return chunks
+
+    def scan_flow_scalar_breaks(self, double, start_mark):
+        # See the specification for details.
+        chunks = []
+        while True:
+            # Instead of checking indentation, we check for document
+            # separators.
+            prefix = self.prefix(3)
+            if (prefix == '---' or prefix == '...')   \
+                    and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+                raise ScannerError("while scanning a quoted scalar", start_mark,
+                        "found unexpected document separator", self.get_mark())
+            while self.peek() in ' \t':
+                self.forward()
+            if self.peek() in '\r\n\x85\u2028\u2029':
+                chunks.append(self.scan_line_break())
+            else:
+                return chunks
+
+    def scan_plain(self):
+        # See the specification for details.
+        # We add an additional restriction for the flow context:
+        #   plain scalars in the flow context cannot contain ',' or '?'.
+        # We also keep track of the `allow_simple_key` flag here.
+        # Indentation rules are relaxed for the flow context.
+        chunks = []
+        start_mark = self.get_mark()
+        end_mark = start_mark
+        indent = self.indent+1
+        # We allow zero indentation for scalars, but then we need to check for
+        # document separators at the beginning of the line.
+        #if indent == 0:
+        #    indent = 1
+        spaces = []
+        while True:
+            length = 0
+            if self.peek() == '#':
+                break
+            while True:
+                ch = self.peek(length)
+                if ch in '\0 \t\r\n\x85\u2028\u2029' \
+                        or (ch == ':' and
+                            self.peek(length+1) in '\0 \t\r\n\x85\u2028\u2029'
+                                  + (u',[]{}' if self.flow_level else u''))\
+                        or (self.flow_level and ch in ',?[]{}'):
+                    break
+                length += 1
+            if length == 0:
+                break
+            self.allow_simple_key = False
+            chunks.extend(spaces)
+            chunks.append(self.prefix(length))
+            self.forward(length)
+            end_mark = self.get_mark()
+            spaces = self.scan_plain_spaces(indent, start_mark)
+            if not spaces or self.peek() == '#' \
+                    or (not self.flow_level and self.column < indent):
+                break
+        return ScalarToken(''.join(chunks), True, start_mark, end_mark)
+
+    def scan_plain_spaces(self, indent, start_mark):
+        # See the specification for details.
+        # The specification is really confusing about tabs in plain scalars.
+        # We just forbid them completely. Do not use tabs in YAML!
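+        # Folding works as in quoted scalars: a plain scalar continued on the
+        # next (more indented) line has the break folded into a space, while
+        # an intervening blank line contributes a '\n'.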
+        chunks = []
+        length = 0
+        while self.peek(length) in ' ':
+            length += 1
+        whitespaces = self.prefix(length)
+        self.forward(length)
+        ch = self.peek()
+        if ch in '\r\n\x85\u2028\u2029':
+            line_break = self.scan_line_break()
+            self.allow_simple_key = True
+            prefix = self.prefix(3)
+            if (prefix == '---' or prefix == '...')   \
+                    and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+                return
+            breaks = []
+            while self.peek() in ' \r\n\x85\u2028\u2029':
+                if self.peek() == ' ':
+                    self.forward()
+                else:
+                    breaks.append(self.scan_line_break())
+                    prefix = self.prefix(3)
+                    if (prefix == '---' or prefix == '...')   \
+                            and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+                        return
+            if line_break != '\n':
+                chunks.append(line_break)
+            elif not breaks:
+                chunks.append(' ')
+            chunks.extend(breaks)
+        elif whitespaces:
+            chunks.append(whitespaces)
+        return chunks
+
+    def scan_tag_handle(self, name, start_mark):
+        # See the specification for details.
+        # For some strange reason, the specification does not allow '_' in
+        # tag handles. I have allowed it anyway.
+        ch = self.peek()
+        if ch != '!':
+            raise ScannerError("while scanning a %s" % name, start_mark,
+                    "expected '!', but found %r" % ch, self.get_mark())
+        length = 1
+        ch = self.peek(length)
+        if ch != ' ':
+            while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z'    \
+                    or ch in '-_':
+                length += 1
+                ch = self.peek(length)
+            if ch != '!':
+                self.forward(length)
+                raise ScannerError("while scanning a %s" % name, start_mark,
+                        "expected '!', but found %r" % ch, self.get_mark())
+            length += 1
+        value = self.prefix(length)
+        self.forward(length)
+        return value
+
+    def scan_tag_uri(self, name, start_mark):
+        # See the specification for details.
+        # Note: we do not check if URI is well-formed.
+        chunks = []
+        length = 0
+        ch = self.peek(length)
+        while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z'    \
+                or ch in '-;/?:@&=+$,_.!~*\'()[]%':
+            if ch == '%':
+                chunks.append(self.prefix(length))
+                self.forward(length)
+                length = 0
+                chunks.append(self.scan_uri_escapes(name, start_mark))
+            else:
+                length += 1
+            ch = self.peek(length)
+        if length:
+            chunks.append(self.prefix(length))
+            self.forward(length)
+            length = 0
+        if not chunks:
+            raise ScannerError("while parsing a %s" % name, start_mark,
+                    "expected URI, but found %r" % ch, self.get_mark())
+        return ''.join(chunks)
+
+    def scan_uri_escapes(self, name, start_mark):
+        # See the specification for details.
+        codes = []
+        mark = self.get_mark()
+        while self.peek() == '%':
+            self.forward()
+            for k in range(2):
+                if self.peek(k) not in '0123456789ABCDEFabcdef':
+                    raise ScannerError("while scanning a %s" % name, start_mark,
+                            "expected URI escape sequence of 2 hexadecimal numbers, but found %r"
+                            % self.peek(k), self.get_mark())
+            codes.append(int(self.prefix(2), 16))
+            self.forward(2)
+        try:
+            value = bytes(codes).decode('utf-8')
+        except UnicodeDecodeError as exc:
+            raise ScannerError("while scanning a %s" % name, start_mark, str(exc), mark)
+        return value
+
+    def scan_line_break(self):
+        # Transforms:
+        #   '\r\n'      :   '\n'
+        #   '\r'        :   '\n'
+        #   '\n'        :   '\n'
+        #   '\x85'      :   '\n'
+        #   '\u2028'    :   '\u2028'
+        #   '\u2029'    :   '\u2029'
+        #   default     :   ''
+        ch = self.peek()
+        if ch in '\r\n\x85':
+            if self.prefix(2) == '\r\n':
+                self.forward(2)
+            else:
+                self.forward()
+            return '\n'
+        elif ch in '\u2028\u2029':
+            self.forward()
+            return ch
+        return ''
diff --git a/venv/lib/python3.12/site-packages/yaml/serializer.py b/venv/lib/python3.12/site-packages/yaml/serializer.py
new file mode 100644
index 00000000..fe911e67
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/yaml/serializer.py
@@ -0,0 +1,111 @@
+
+__all__ = ['Serializer', 'SerializerError']
+
+from .error import YAMLError
+from .events import *
+from .nodes import *
+
+class SerializerError(YAMLError):
+    pass
+
+class Serializer:
+
+    ANCHOR_TEMPLATE = 'id%03d'
+
+    def __init__(self, encoding=None,
+            explicit_start=None, explicit_end=None, version=None, tags=None):
+        self.use_encoding = encoding
+        self.use_explicit_start = explicit_start
+        self.use_explicit_end = explicit_end
+        self.use_version = version
+        self.use_tags = tags
+        self.serialized_nodes = {}
+        self.anchors = {}
+        self.last_anchor_id = 0
+        self.closed = None
+
+    def open(self):
+        if self.closed is None:
+            self.emit(StreamStartEvent(encoding=self.use_encoding))
+            self.closed = False
+        elif self.closed:
+            raise SerializerError("serializer is closed")
+        else:
+            raise SerializerError("serializer is already opened")
+
+    def close(self):
+        if self.closed is None:
+            raise SerializerError("serializer is not opened")
+        elif not self.closed:
+            self.emit(StreamEndEvent())
+            self.closed = True
+
+    #def __del__(self):
+    #    self.close()
+
+    def serialize(self, node):
+        if self.closed is None:
+            raise SerializerError("serializer is not opened")
+        elif self.closed:
+            raise SerializerError("serializer is closed")
+        self.emit(DocumentStartEvent(explicit=self.use_explicit_start,
+            version=self.use_version, tags=self.use_tags))
+        self.anchor_node(node)
+        self.serialize_node(node, None, None)
+        self.emit(DocumentEndEvent(explicit=self.use_explicit_end))
+        self.serialized_nodes = {}
+        self.anchors = {}
+        self.last_anchor_id = 0
+
+    def anchor_node(self, node):
+        if node in self.anchors:
+            if self.anchors[node] is None:
+                self.anchors[node] = self.generate_anchor(node)
+        else:
+            self.anchors[node] = None
+            if isinstance(node, SequenceNode):
+                for item in node.value:
+                    self.anchor_node(item)
+            elif isinstance(node, MappingNode):
+                for key, value in node.value:
+                    self.anchor_node(key)
+                    self.anchor_node(value)
+
+    def generate_anchor(self, node):
+        self.last_anchor_id += 1
+        return self.ANCHOR_TEMPLATE % self.last_anchor_id
+
+    def serialize_node(self, node, parent, index):
+        alias = self.anchors[node]
+        if node in self.serialized_nodes:
+            self.emit(AliasEvent(alias))
+        else:
+            self.serialized_nodes[node] = True
+            self.descend_resolver(parent, index)
+            if isinstance(node, ScalarNode):
+                detected_tag = self.resolve(ScalarNode, node.value, (True, False))
+                default_tag = self.resolve(ScalarNode, node.value, (False, True))
+                implicit = (node.tag == detected_tag), (node.tag == default_tag)
+                self.emit(ScalarEvent(alias, node.tag, implicit, node.value,
+                    style=node.style))
+            elif isinstance(node, SequenceNode):
+                implicit = (node.tag
+                            == self.resolve(SequenceNode, node.value, True))
+                self.emit(SequenceStartEvent(alias, node.tag, implicit,
+                    flow_style=node.flow_style))
+                index = 0
+                for item in node.value:
+                    self.serialize_node(item, node, index)
+                    index += 1
+                self.emit(SequenceEndEvent())
+            elif isinstance(node, MappingNode):
+                implicit = (node.tag
+                            == self.resolve(MappingNode, node.value, True))
+                self.emit(MappingStartEvent(alias, node.tag, implicit,
+                    flow_style=node.flow_style))
+                for key, value in node.value:
+                    self.serialize_node(key, node, None)
+                    self.serialize_node(value, node, key)
+                self.emit(MappingEndEvent())
+            self.ascend_resolver()
+
diff --git a/venv/lib/python3.12/site-packages/yaml/tokens.py b/venv/lib/python3.12/site-packages/yaml/tokens.py
new file mode 100644
index 00000000..4d0b48a3
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/yaml/tokens.py
@@ -0,0 +1,104 @@
+
+class Token(object):
+    def __init__(self, start_mark, end_mark):
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+    def __repr__(self):
+        attributes = [key for key in self.__dict__
+                if not key.endswith('_mark')]
+        attributes.sort()
+        arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
+                for key in attributes])
+        return '%s(%s)' % (self.__class__.__name__, arguments)
+
+#class BOMToken(Token):
+#    id = '<byte order mark>'
+
+class DirectiveToken(Token):
+    id = '<directive>'
+    def __init__(self, name, value, start_mark, end_mark):
+        self.name = name
+        self.value = value
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+
+class DocumentStartToken(Token):
+    id = '<document start>'
+
+class DocumentEndToken(Token):
+    id = '<document end>'
+
+class StreamStartToken(Token):
+    id = '<stream start>'
+    def __init__(self, start_mark=None, end_mark=None,
+            encoding=None):
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+        self.encoding = encoding
+
+class StreamEndToken(Token):
+    id = '<stream end>'
+
+class BlockSequenceStartToken(Token):
+    id = '<block sequence start>'
+
+class BlockMappingStartToken(Token):
+    id = '<block mapping start>'
+
+class BlockEndToken(Token):
+    id = '<block end>'
+
+class FlowSequenceStartToken(Token):
+    id = '['
+
+class FlowMappingStartToken(Token):
+    id = '{'
+
+class FlowSequenceEndToken(Token):
+    id = ']'
+
+class FlowMappingEndToken(Token):
+    id = '}'
+
+class KeyToken(Token):
+    id = '?'
+
+class ValueToken(Token):
+    id = ':'
+
+class BlockEntryToken(Token):
+    id = '-'
+
+class FlowEntryToken(Token):
+    id = ','
+
+class AliasToken(Token):
+    id = '<alias>'
+    def __init__(self, value, start_mark, end_mark):
+        self.value = value
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+
+class AnchorToken(Token):
+    id = '<anchor>'
+    def __init__(self, value, start_mark, end_mark):
+        self.value = value
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+
+class TagToken(Token):
+    id = '<tag>'
+    def __init__(self, value, start_mark, end_mark):
+        self.value = value
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+
+class ScalarToken(Token):
+    id = '<scalar>'
+    def __init__(self, value, plain, start_mark, end_mark, style=None):
+        self.value = value
+        self.plain = plain
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+        self.style = style
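Taken together, the scanner, serializer, and token classes vendored above are driven by PyYAML's top-level entry points. A minimal sketch of how they surface to callers (assuming only the standard `yaml` package that ships these modules, nothing project-specific):

    import yaml

    # yaml.scan() runs the Scanner and yields the Token subclasses defined in
    # tokens.py; each one carries the start/end marks the scanner recorded.
    for token in yaml.scan("key: [a, 'b c']\n"):
        print(token.id, getattr(token, 'value', ''))

    # Dumping an aliased structure exercises Serializer.anchor_node() and
    # ANCHOR_TEMPLATE: the shared list is emitted once as &id001 and then
    # referenced as *id001.
    shared = ['x']
    print(yaml.safe_dump({'a': shared, 'b': shared}))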